diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..d7d9cae
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,13 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.py]
+charset = utf-8
+indent_style = space
+indent_size = 4
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..befa060
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,5 @@
+# Run code through yapf
+19a821d5f1ff9079f9a40d27553182a433a27834
+
+# Run code through black
+0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore
index 029341d..767654b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,90 +1,46 @@
+#
+# OS-specific
+#
+
 .DS_Store
-# Byte-compiled / optimized / DLL files
-__pycache__/
+
+#
+# Language specific
+#
+
+# Python
 *.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
 *.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*,cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-#Ipython Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# PyCharm
-.idea/
-
-# IntelliJ
-*.iml
-
-# VSCode
-/.vscode
-
-# Python virtual environment
+/build/
 /.venv
 
-# antlion configuration files
+#
+# Editors
+#
+
+/.idea/
+/.vscode/
+
+#
+# antlion
+#
+
+# Configuration
 /*.json
 /*.yaml
 /config/
 
-# antlion runtime files
+# Generated during run-time
 /logs
 
 # Local development scripts
 /*.sh
+
+#
+# third_party
+#
+
+/third_party/*
+!/third_party/github.com/
+!/third_party/github.com/jd/tenacity
+/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn
new file mode 100644
index 0000000..f2aab56
--- /dev/null
+++ b/BUILD.gn
@@ -0,0 +1,228 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare Fuchsia build targets for using antlion from the Fuchsia tree.
+# Requires additional configuration of jiri fetch attributes from your Fuchsia
+# checkout:
+#   `jiri init -fetch-optional=antlion`
+
+import("//build/python/python_library.gni")
+
+# Tests for full build validation
+group("e2e_tests") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests" ]
+}
+
+# Subset of tests to validate builds in under 15 minutes.
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests_quick" ]
+}
+
+# Tests for at-desk custom validation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [ "src/antlion/tests:e2e_tests_manual" ]
+}
+
+# deprecated: prefer e2e_tests_quick
+group("smoke_tests") {
+  testonly = true
+  public_deps = [ ":e2e_tests_quick" ]
+}
+
+# Unit tests only
+group("tests") {
+  testonly = true
+  public_deps = [ "runner:tests" ]
+}
+
+python_library("antlion") {
+  source_root = "//third_party/antlion/src/antlion"
+  sources = [
+    "__init__.py",
+    "base_test.py",
+    "bin/__init__.py",
+    "bin/act.py",
+    "capabilities/__init__.py",
+    "capabilities/ssh.py",
+    "config_parser.py",
+    "context.py",
+    "controllers/__init__.py",
+    "controllers/access_point.py",
+    "controllers/adb.py",
+    "controllers/adb_lib/__init__.py",
+    "controllers/adb_lib/error.py",
+    "controllers/android_device.py",
+    "controllers/android_lib/__init__.py",
+    "controllers/android_lib/errors.py",
+    "controllers/android_lib/events.py",
+    "controllers/android_lib/logcat.py",
+    "controllers/android_lib/services.py",
+    "controllers/android_lib/tel/__init__.py",
+    "controllers/android_lib/tel/tel_utils.py",
+    "controllers/ap_lib/__init__.py",
+    "controllers/ap_lib/ap_get_interface.py",
+    "controllers/ap_lib/ap_iwconfig.py",
+    "controllers/ap_lib/bridge_interface.py",
+    "controllers/ap_lib/dhcp_config.py",
+    "controllers/ap_lib/dhcp_server.py",
+    "controllers/ap_lib/extended_capabilities.py",
+    "controllers/ap_lib/hostapd.py",
+    "controllers/ap_lib/hostapd_ap_preset.py",
+    "controllers/ap_lib/hostapd_bss_settings.py",
+    "controllers/ap_lib/hostapd_config.py",
+    "controllers/ap_lib/hostapd_constants.py",
+    "controllers/ap_lib/hostapd_security.py",
+    "controllers/ap_lib/hostapd_utils.py",
+    "controllers/ap_lib/radio_measurement.py",
+    "controllers/ap_lib/radvd.py",
+    "controllers/ap_lib/radvd_config.py",
+    "controllers/ap_lib/radvd_constants.py",
+    "controllers/ap_lib/third_party_ap_profiles/__init__.py",
+    "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
+    "controllers/ap_lib/third_party_ap_profiles/asus.py",
+    "controllers/ap_lib/third_party_ap_profiles/belkin.py",
+    "controllers/ap_lib/third_party_ap_profiles/linksys.py",
+    "controllers/ap_lib/third_party_ap_profiles/netgear.py",
+    "controllers/ap_lib/third_party_ap_profiles/securifi.py",
+    "controllers/ap_lib/third_party_ap_profiles/tplink.py",
+    "controllers/ap_lib/wireless_network_management.py",
+    "controllers/attenuator.py",
+    "controllers/attenuator_lib/__init__.py",
+    "controllers/attenuator_lib/_tnhelper.py",
+    "controllers/attenuator_lib/aeroflex/__init__.py",
+    "controllers/attenuator_lib/aeroflex/telnet.py",
+    "controllers/attenuator_lib/minicircuits/__init__.py",
+    "controllers/attenuator_lib/minicircuits/http.py",
+    "controllers/attenuator_lib/minicircuits/telnet.py",
+    "controllers/fastboot.py",
+    "controllers/fuchsia_device.py",
+    "controllers/fuchsia_lib/__init__.py",
+    "controllers/fuchsia_lib/base_lib.py",
+    "controllers/fuchsia_lib/device_lib.py",
+    "controllers/fuchsia_lib/ffx.py",
+    "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py",
+    "controllers/fuchsia_lib/lib_controllers/__init__.py",
+    "controllers/fuchsia_lib/lib_controllers/netstack_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
+    "controllers/fuchsia_lib/location/__init__.py",
+    "controllers/fuchsia_lib/location/regulatory_region_lib.py",
+    "controllers/fuchsia_lib/logging_lib.py",
+    "controllers/fuchsia_lib/netstack/__init__.py",
+    "controllers/fuchsia_lib/netstack/netstack_lib.py",
+    "controllers/fuchsia_lib/package_server.py",
+    "controllers/fuchsia_lib/sl4f.py",
+    "controllers/fuchsia_lib/ssh.py",
+    "controllers/fuchsia_lib/utils_lib.py",
+    "controllers/fuchsia_lib/wlan_ap_policy_lib.py",
+    "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
+    "controllers/fuchsia_lib/wlan_lib.py",
+    "controllers/fuchsia_lib/wlan_policy_lib.py",
+    "controllers/iperf_client.py",
+    "controllers/iperf_server.py",
+    "controllers/openwrt_ap.py",
+    "controllers/openwrt_lib/__init__.py",
+    "controllers/openwrt_lib/network_const.py",
+    "controllers/openwrt_lib/network_settings.py",
+    "controllers/openwrt_lib/openwrt_constants.py",
+    "controllers/openwrt_lib/wireless_config.py",
+    "controllers/openwrt_lib/wireless_settings_applier.py",
+    "controllers/pdu.py",
+    "controllers/pdu_lib/__init__.py",
+    "controllers/pdu_lib/digital_loggers/__init__.py",
+    "controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "controllers/pdu_lib/synaccess/__init__.py",
+    "controllers/pdu_lib/synaccess/np02b.py",
+    "controllers/sl4a_lib/__init__.py",
+    "controllers/sl4a_lib/error_reporter.py",
+    "controllers/sl4a_lib/event_dispatcher.py",
+    "controllers/sl4a_lib/rpc_client.py",
+    "controllers/sl4a_lib/rpc_connection.py",
+    "controllers/sl4a_lib/sl4a_manager.py",
+    "controllers/sl4a_lib/sl4a_ports.py",
+    "controllers/sl4a_lib/sl4a_session.py",
+    "controllers/sl4a_lib/sl4a_types.py",
+    "controllers/sniffer.py",
+    "controllers/sniffer_lib/__init__.py",
+    "controllers/sniffer_lib/local/__init__.py",
+    "controllers/sniffer_lib/local/local_base.py",
+    "controllers/sniffer_lib/local/tcpdump.py",
+    "controllers/sniffer_lib/local/tshark.py",
+    "controllers/utils_lib/__init__.py",
+    "controllers/utils_lib/commands/__init__.py",
+    "controllers/utils_lib/commands/ip.py",
+    "controllers/utils_lib/commands/route.py",
+    "controllers/utils_lib/commands/shell.py",
+    "controllers/utils_lib/host_utils.py",
+    "controllers/utils_lib/ssh/__init__.py",
+    "controllers/utils_lib/ssh/connection.py",
+    "controllers/utils_lib/ssh/formatter.py",
+    "controllers/utils_lib/ssh/settings.py",
+    "dict_object.py",
+    "error.py",
+    "event/__init__.py",
+    "event/decorators.py",
+    "event/event.py",
+    "event/event_bus.py",
+    "event/event_subscription.py",
+    "event/subscription_bundle.py",
+    "event/subscription_handle.py",
+    "keys.py",
+    "libs/__init__.py",
+    "libs/logging/__init__.py",
+    "libs/logging/log_stream.py",
+    "libs/ota/__init__.py",
+    "libs/ota/ota_runners/__init__.py",
+    "libs/ota/ota_runners/ota_runner.py",
+    "libs/ota/ota_runners/ota_runner_factory.py",
+    "libs/ota/ota_tools/__init__.py",
+    "libs/ota/ota_tools/adb_sideload_ota_tool.py",
+    "libs/ota/ota_tools/ota_tool.py",
+    "libs/ota/ota_tools/ota_tool_factory.py",
+    "libs/ota/ota_tools/update_device_ota_tool.py",
+    "libs/ota/ota_updater.py",
+    "libs/proc/__init__.py",
+    "libs/proc/job.py",
+    "libs/proc/process.py",
+    "libs/yaml_writer.py",
+    "logger.py",
+    "net.py",
+    "records.py",
+    "signals.py",
+    "test_decorators.py",
+    "test_runner.py",
+    "test_utils/__init__.py",
+    "test_utils/abstract_devices/__init__.py",
+    "test_utils/abstract_devices/wlan_device.py",
+    "test_utils/abstract_devices/wmm_transceiver.py",
+    "test_utils/dhcp/__init__.py",
+    "test_utils/dhcp/base_test.py",
+    "test_utils/fuchsia/__init__.py",
+    "test_utils/fuchsia/utils.py",
+    "test_utils/fuchsia/wmm_test_cases.py",
+    "test_utils/net/__init__.py",
+    "test_utils/net/connectivity_const.py",
+    "test_utils/net/net_test_utils.py",
+    "test_utils/wifi/__init__.py",
+    "test_utils/wifi/base_test.py",
+    "test_utils/wifi/wifi_constants.py",
+    "test_utils/wifi/wifi_performance_test_utils/__init__.py",
+    "test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py",
+    "test_utils/wifi/wifi_performance_test_utils/brcm_utils.py",
+    "test_utils/wifi/wifi_performance_test_utils/ping_utils.py",
+    "test_utils/wifi/wifi_performance_test_utils/qcom_utils.py",
+    "test_utils/wifi/wifi_power_test_utils.py",
+    "test_utils/wifi/wifi_test_utils.py",
+    "tracelogger.py",
+    "utils.py",
+  ]
+  library_deps = [
+    "//third_party/mobly",
+    "//third_party/pyyaml:yaml",
+    "third_party/github.com/jd/tenacity",
+  ]
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a9c7f67..248b51f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,20 +10,65 @@
 
 ## [Unreleased]
 
+## 0.3.0 - 2023-05-17
+
+### Deprecated
+
+- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To
+ease this transition, upon running `act.py`, a compatible YAML config will be
+generated for you and placed next to your JSON config.
+- **The `act.py` binary; instead, invoke tests directly.** Upon running
+`act.py`, a deprecation warning will provide instructions for how to invoke
+antlion tests without act.py and with the newly generated YAML config.
+
 ### Added
 
+- Presubmit testing in [CV][CV] (aka CQ). All tests specified with the
+`qemu_env` environment will run before every antlion CL is submitted.
+- Postsubmit testing in [CI][CI]. See [Milo][builders] for an exhaustive list of
+builders.
+- [EditorConfig](https://editorconfig.org) file for consistent coding styles.
+Installing an EditorConfig plugin for your editor is highly recommended.
+
+[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
+[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
+[builders]: https://luci-milo.appspot.com/ui/search?q=antlion
+
 ### Changed
 
+- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes
+the test file directly using the Mobly test runner, rather than using `act.py`.
+  - All tests have been refactored to allow direct running with the Mobly test
+  runner.
+  - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The
+  resulting config is passed directly to Mobly's config parser. See notes for
+  this release's deprecations above.
+- Generate YAML config instead of JSON config from antlion-runner.
+- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field
+is only used during `FlashTest`; it is not used when the device is already
+provisioned (e.g. when tests are dispatched in Fuchsia infrastructure).
+
 ### Removed
 
+- Unused controllers and tests (full list)
+
 ### Fixed
 
+- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
+[bug](http://b/267330535))
+- Failure to parse 'test_name' in the DHCP configuration file in
+`Dhcpv4InteropTest` (invalid option) introduced by a previous refactor
+([@patricklu], [bug](http://b/232574848))
+- Logging for `Dhcpv4InteropTest` changed to use a temp file instead of
+/var/log/messages, fixing a test error caused by duplicate PID log messages
+([@patricklu], [bug](http://b/232574848))
+
 [unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
 
-## [0.2.0] - 2022-01-03
+## [0.2.0] - 2023-01-03
 
 ### Added
 
+- Added snapshots before reboot and during test teardown in `WlanRebootTest`
+([@patricklu], [bug](http://b/273923552))
 - Download radvd logs from AP for debugging IPv6 address allocation
 - Optional `wlan_features` config field to `FuchsiaDevice` for declaring which
 WLAN features the device supports, such as BSS Transition Management
diff --git a/README.md b/README.md
index be529cf..7d5950b 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,85 @@
 [Report Bug]: http://go/conn-test-bug
 [Request Feature]: http://b/issues/new?component=1182297&template=1680893
 
-## Getting Started
+## Getting started with QEMU
+
+The quickest way to run antlion is by using the Fuchsia QEMU emulator. This
+enables antlion tests that do not require hardware-specific capabilities like
+WLAN. This is especially useful for verifying that antlion builds and runs
+without syntax errors. If you require WLAN capabilities, see
+[below](#running-with-a-local-physical-device).
+
+1. [Check out Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
+
+2. Configure and build Fuchsia to run antlion tests virtually on QEMU
+
+   ```sh
+   fx set core.qemu-x64 \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests_quick
+   fx build
+   ```
+
+3. In a separate terminal, run the emulator with networking enabled
+
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+
+4. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+5. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/src/antlion/tests/examples:sl4f_sanity_test
+   ```
+
+## Running with a local physical device
+
+A physical device is required for most antlion tests, which rely on physical I/O
+such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
+as easy, reliable, and reproducible as possible. The device will be discovered
+using mDNS, so make sure your host machine has a network connection to the
+device.
+
+1. Configure and build Fuchsia for your target with the following extra
+   arguments:
+
+   ```sh
+   fx set core.my-super-cool-product \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests
+   fx build
+   ```
+
+2. Flash your device with the new build
+
+3. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+4. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/src/antlion/tests/functional:ping_stress_test
+   ```
+
+> Local auxiliary devices are not yet supported by `antlion-runner`, which is
+> responsible for generating Mobly configs. In the meantime, see the
+> section below for manually crafting Mobly configs to support auxiliary
+> devices.
+
+## Running without a Fuchsia checkout
 
 Requires Python 3.8+
 
@@ -25,52 +103,77 @@
 
    ```sh
    cd antlion
-   python3 -m venv .venv  # creates a "virtual environment" in the `.venv` directory
-   source .venv/bin/activate  # activates the virtual environment. Run `deactivate` to exit it later
-   pip install --editable ".[dev,test]"
+   python3 -m venv .venv      # Create a virtual environment in the `.venv` directory
+   source .venv/bin/activate  # Activate the virtual environment
+   pip install --editable ".[mdns]"
+   # Run `deactivate` later to exit the virtual environment
    ```
 
 3. Write the sample config and update the Fuchsia controller to match your
    development environment
 
    ```sh
-   mkdir -p config
-   cat <<EOF > config/simple.json
-   {
-      "testbed": [{
-         "name": "simple_testbed",
-         "FuchsiaDevice": [{
-            "ip": "fuchsia-00e0-4c01-04df"
-         }]
-      }],
-      "logpath": "logs"
-   }
+   cat <<EOF > simple-config.yaml
+   TestBeds:
+   - Name: antlion-runner
+     Controllers:
+       FuchsiaDevice:
+       - ip: fuchsia-00e0-4c01-04df
+   MoblyParams:
+     LogPath: logs
    EOF
    ```
 
+   Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or
+   `fuchsia-emulator` if using an emulator. The nodename can be found by looking
+   for a log similar to the one below.
+
+   ```text
+   [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator'
+   ```
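+
+   Tests that use auxiliary devices (for example an access point) declare those
+   controllers in the same testbed. Below is a minimal sketch; the
+   `AccessPoint` field names mirror what `antlion-runner` emits (see
+   `runner/src/config.rs`), and the host, user, and paths shown are
+   placeholders for your own environment:
+
+   ```yaml
+   TestBeds:
+   - Name: antlion-runner
+     Controllers:
+       FuchsiaDevice:
+       - ip: fuchsia-00e0-4c01-04df
+       AccessPoint:
+       - wan_interface: eth0
+         ssh_config:
+           ssh_binary_path: /usr/bin/ssh
+           host: 192.168.1.1
+           user: root
+           identity_file: ~/.ssh/id_ed25519
+   MoblyParams:
+     LogPath: logs
+   ```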
+
 4. Run the sanity test
 
    ```sh
-   antlion -c config/simple.json -tc Sl4fSanityTest
+   python src/antlion/tests/examples/Sl4fSanityTest.py -c simple-config.yaml
    ```
 
-See `antlion -h` for more full usage.
-
 ## Contributing
 
-Contributions are what make open source a great place to learn, inspire, and
-create. Any contributions you make are **greatly appreciated**.
+Contributions are what make open source projects a great place to learn,
+inspire, and create. Any contributions you make are **greatly appreciated**.
+If you have a suggestion that would make this better, please create a CL.
 
-If you have a suggestion that would make this better, please create a pull
-request.
+Before contributing, additional setup is necessary:
 
-1. Create a feature branch (`git checkout -b feature/amazing-feature`)
-2. Document your change in `CHANGELOG.md`
-3. Commit changes (`git commit -m 'Add some amazing feature'`)
-4. Upload CL (`git push origin HEAD:refs/for/main`)
+- Install developer Python packages for formatting and linting
+
+  ```sh
+  pip install --editable ".[dev]"
+  ```
+
+- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
+  whitespace
+
+- Install [Black](https://pypi.org/project/black/), our preferred code formatter.
+  Optionally, add the extension to your editor.
+
+- Complete the steps in '[Contribute source changes]' to gain authorization to
+  upload CLs to Fuchsia's Gerrit.
+
+To create a CL:
+
+1. Create a branch (`git checkout -b feature/amazing-feature`)
+2. Make changes
+3. Document the changes in `CHANGELOG.md`
+4. Run your change through the `Black` formatter (e.g. `black .`)
+5. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
+6. Upload CL (`git push origin HEAD:refs/for/main`)
 
 > A public bug tracker is not (yet) available.
 
+[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
+
 ### Recommended git aliases
 
 There are a handful of git commands that will be commonly used throughout the
@@ -87,6 +190,13 @@
   uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana
 ```
 
+You may also want to configure `git blame` to ignore the project's large
+formatting changes:
+
+```gitconfig
+[blame]
+  ignoreRevsFile = .git-blame-ignore-revs
+```
+
 ## License
 
 Distributed under the Apache 2.0 License. See `LICENSE` for more information.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
new file mode 100644
index 0000000..96f7654
--- /dev/null
+++ b/antlion_host_test.gni
@@ -0,0 +1,159 @@
+import("//build/python/python_binary.gni")
+import("//build/rust/rustc_binary.gni")
+import("//build/testing/host_test.gni")
+import("//build/testing/host_test_data.gni")
+
+# Declares a host-side antlion test.
+#
+# Example
+#
+# ```
+# antlion_host_test("Sl4fSanityTest") {
+#   main_source = "Sl4fSanityTest.py"
+# }
+# ```
+#
+# Parameters
+#
+#  main_source
+#    The .py file defining the antlion test.
+#    Type: path
+#
+#  sources (optional)
+#    Other files that are used in the test.
+#    Type: list(path)
+#    Default: empty list
+#
+#  test_params (optional)
+#    Path to a YAML file with additional test parameters. This will be provided
+#    to the test in the antlion config under the "test_params" key.
+#    Type: string
+#
+#  extra_args (optional)
+#    Additional arguments to pass to the test.
+#    Type: list(string)
+#
+#   deps
+#   environments
+#   visibility
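+#
+# The file passed as test_params is plain YAML; its top-level keys are
+# test-specific and the keys below are purely illustrative. antlion exposes the
+# file's contents to the test under the "test_params" key:
+#
+# ```yaml
+# example_test_params:
+#   iteration_count: 3
+# ```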
+template("antlion_host_test") {
+  assert(defined(invoker.main_source), "main_source is required")
+
+  #
+  # Define antlion test python_binary().
+  #
+  _python_binary_name = "${target_name}.pyz"
+  _python_binary_target = "${target_name}_python_binary"
+  python_binary(_python_binary_target) {
+    forward_variables_from(invoker,
+                           [
+                             "main_source",
+                             "sources",
+                           ])
+    output_name = _python_binary_name
+    main_callable = "test_runner.main" # Mobly-specific entry point.
+    deps = [ "//third_party/antlion" ]
+    testonly = true
+    visibility = [ ":*" ]
+  }
+
+  _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir")
+
+  #
+  # Define antlion test host_test_data().
+  #
+  _host_test_data_target = "${target_name}_test_data"
+  host_test_data(_host_test_data_target) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") +
+                "/${_python_binary_name}" ]
+    outputs = [ "${_test_dir}/${_python_binary_name}" ]
+    deps = [ ":${_python_binary_target}" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  #
+  # Define SSH binary host_test_data().
+  #
+  _host_test_data_ssh = "${target_name}_test_data_ssh"
+  host_test_data(_host_test_data_ssh) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ]
+    outputs = [ "${_test_dir}/ssh" ]
+  }
+
+  #
+  # Define Mobly test params YAML host_test_data().
+  #
+  if (defined(invoker.test_params)) {
+    _host_test_data_test_params = "${target_name}_test_data_test_params"
+    host_test_data(_host_test_data_test_params) {
+      testonly = true
+      visibility = [ ":*" ]
+      sources = [ invoker.test_params ]
+      outputs = [ "${_test_dir}/${invoker.test_params}" ]
+    }
+  }
+
+  #
+  # Define FFX binary host_test_data().
+  #
+  _host_test_data_ffx = "${target_name}_test_data_ffx"
+  host_test_data(_host_test_data_ffx) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ]
+    outputs = [ "${_test_dir}/ffx" ]
+    deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ]
+  }
+
+  #
+  # Define the antlion host_test() using antlion-runner.
+  #
+  host_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "environments",
+                             "visibility",
+                           ])
+
+    binary_path = "${root_out_dir}/antlion-runner"
+
+    args = [
+      "--python-bin",
+      rebase_path(python_exe_src, root_build_dir),
+      "--antlion-pyz",
+      rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir),
+      "--out-dir",
+      rebase_path("${_test_dir}", root_build_dir),
+      "--ffx-binary",
+      rebase_path("${_test_dir}/ffx", root_build_dir),
+      "--ssh-binary",
+      rebase_path("${_test_dir}/ssh", root_build_dir),
+    ]
+
+    deps = [
+      ":${_host_test_data_ffx}",
+      ":${_host_test_data_ssh}",
+      ":${_host_test_data_target}",
+      "//build/python:interpreter",
+      "//third_party/antlion/runner",
+    ]
+
+    if (defined(invoker.test_params)) {
+      args += [
+        "--test-params",
+        rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir),
+      ]
+      deps += [ ":${_host_test_data_test_params}" ]
+    }
+
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+  }
+}
diff --git a/environments.gni b/environments.gni
new file mode 100644
index 0000000..2bdfb53
--- /dev/null
+++ b/environments.gni
@@ -0,0 +1,141 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+antlion_astro_env = {
+  dimensions = {
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+antlion_sherlock_env = {
+  dimensions = {
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+antlion_nelson_env = {
+  dimensions = {
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+astro_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Astro"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+sherlock_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Sherlock"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nelson_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Nelson"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+display_envs = [
+  antlion_astro_env,
+  antlion_sherlock_env,
+  antlion_nelson_env,
+]
+
+display_ap_envs = [
+  astro_ap_env,
+  sherlock_ap_env,
+  nelson_ap_env,
+]
+
+display_ap_iperf_envs = [
+  astro_ap_iperf_env,
+  sherlock_ap_iperf_env,
+  nelson_ap_iperf_env,
+]
+
+display_ap_iperf_attenuator_envs = [
+  astro_ap_iperf_attenuator_env,
+  sherlock_ap_iperf_attenuator_env,
+  nelson_ap_iperf_attenuator_env,
+]
diff --git a/pyproject.toml b/pyproject.toml
index c0fa915..b385122 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,25 +20,19 @@
 [project.optional-dependencies]
 # Required to support development tools
 dev = [
+    "mock",    # required for unit tests
     "shiv",    # packaging python
     "toml",    # required for pyproject.toml
     "vulture", # finds unused code
-    "yapf",    # code formatting
+    "black",   # code formatting
 ]
 digital_loggers_pdu = ["dlipower"]
-bluetooth = ["soundfile"]
 html_graphing = ["bokeh"]
 flash = ["usbinfo"]
 mdns = ["psutil", "zeroconf"]
 android = [
-    "Monsoon",
     "numpy",
-    "paramiko[ed25519]",
-    "pylibftdi",
-    "pyserial",
-    "requests",
     "scapy",
-    "scipy",
 ]
 
 [project.scripts]
@@ -49,19 +43,13 @@
 
 [tool.autoflake]
 imports = [
-    "Monsoon",
     "antlion",
     "dataclasses",
     "dlipower",
     "mobly",
     "mock",
     "numpy",
-    "paramiko",
-    "protobuf",
-    "pylibftdi",
-    "requests",
     "scapy",
-    "scipy",
     "tenacity",
     "usbinfo",
     "zeroconf",
diff --git a/runner/BUILD.gn b/runner/BUILD.gn
new file mode 100644
index 0000000..d405592
--- /dev/null
+++ b/runner/BUILD.gn
@@ -0,0 +1,50 @@
+import("//build/rust/rustc_binary.gni")
+import("//build/rust/rustc_test.gni")
+
+rustc_binary("runner") {
+  output_name = "antlion-runner"
+  edition = "2021"
+  with_unit_tests = true
+
+  deps = [
+    "//src/developer/ffx/lib/netext:lib($host_toolchain)",
+    "//src/lib/mdns/rust:mdns",
+    "//src/lib/network/packet",
+    "//third_party/rust_crates:anyhow",
+    "//third_party/rust_crates:argh",
+    "//third_party/rust_crates:home",
+    "//third_party/rust_crates:itertools",
+    "//third_party/rust_crates:lazy_static",
+    "//third_party/rust_crates:nix",
+    "//third_party/rust_crates:serde",
+    "//third_party/rust_crates:serde_json",
+    "//third_party/rust_crates:serde_yaml",
+    "//third_party/rust_crates:socket2",
+    "//third_party/rust_crates:thiserror",
+  ]
+
+  test_deps = [
+    "//third_party/rust_crates:assert_matches",
+    "//third_party/rust_crates:indoc",
+    "//third_party/rust_crates:pretty_assertions",
+    "//third_party/rust_crates:tempfile",
+  ]
+
+  sources = [
+    "src/config.rs",
+    "src/driver/infra.rs",
+    "src/driver/local.rs",
+    "src/driver/mod.rs",
+    "src/env.rs",
+    "src/finder.rs",
+    "src/main.rs",
+    "src/net.rs",
+    "src/runner.rs",
+    "src/yaml.rs",
+  ]
+}
+
+group("tests") {
+  testonly = true
+  deps = [ ":runner_test($host_toolchain)" ]
+}
diff --git a/runner/README.md b/runner/README.md
new file mode 100644
index 0000000..45c926a
--- /dev/null
+++ b/runner/README.md
@@ -0,0 +1,42 @@
+# antlion-runner
+
+A program to run antlion locally and in infrastructure. Includes a config
+generator with mDNS discovery and sensible defaults.
+
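+For reference, the generated Mobly config looks roughly like the sketch below
+(values are placeholders; see `src/config.rs` and the tests in
+`src/driver/infra.rs` for the exact schema):
+
+```yaml
+TestBeds:
+- Name: antlion-runner
+  Controllers:
+    FuchsiaDevice:
+    - mdns_name: fuchsia-1234-5678-9abc
+      ip: fe80::1%2
+      take_bug_report_on_fail: true
+      ssh_binary_path: /path/to/ssh
+      ffx_binary_path: /path/to/ffx
+      ssh_priv_key: /path/to/ssh/key
+      hard_reboot_on_fail: true
+MoblyParams:
+  LogPath: /path/to/output
+```
+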
+## Using locally with an emulator
+
+Running antlion locally with a Fuchsia emulator allows developers to perform a
+sanity checks on their changes. Running this way is very quick (~5 seconds) and
+can spot simple mistakes before code review!
+
+1. Build Fuchsia with antlion support
+   ```sh
+   jiri update -gc # if you haven't updated in a while
+   fx set workstation_eng_paused.qemu-x64 \
+      --with-host //third_party/antlion:e2e_tests \
+      --with-host //third_party/antlion:tests \
+      --with //src/testing/sl4f
+   fx build # if you haven't built in a while
+   ```
+2. Start the package server. Keep this running in the background.
+   ```sh
+   fx serve
+   ```
+3. In a separate terminal, start the emulator with access to external networks.
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+4. In a separate terminal, run a test
+   ```sh
+   fx test --e2e --output //third_party/antlion:sl4f_sanity_test
+   ```
+
+> Note: Local testing with auxiliary devices is not supported by
+> `antlion-runner`. Use antlion directly for these cases; see the antlion
+> [README](../README.md).
+
+## Testing
+
+```sh
+fx set core.qemu-x64 --with //third_party/antlion/runner:tests
+fx test --output //third_party/antlion/runner:runner_test
+```
diff --git a/runner/src/config.rs b/runner/src/config.rs
new file mode 100644
index 0000000..571a8ab
--- /dev/null
+++ b/runner/src/config.rs
@@ -0,0 +1,162 @@
+// Copyright 2022 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+use serde_yaml::Value;
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Config used by antlion for declaring testbeds and test parameters.
+pub(crate) struct Config {
+    #[serde(rename = "TestBeds")]
+    pub testbeds: Vec<Testbed>,
+    pub mobly_params: MoblyParams,
+}
+
+impl Config {
+    /// Merge the given test parameters into all testbeds.
+    pub fn merge_test_params(&mut self, test_params: Value) {
+        for testbed in self.testbeds.iter_mut() {
+            match testbed.test_params.as_mut() {
+                Some(existing) => yaml::merge(existing, test_params.clone()),
+                None => testbed.test_params = Some(test_params.clone()),
+            }
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Parameters consumed by Mobly.
+pub(crate) struct MoblyParams {
+    pub log_path: PathBuf,
+}
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// A group of interconnected devices to be used together during an antlion test.
+pub(crate) struct Testbed {
+    pub name: String,
+    pub controllers: Controllers,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub test_params: Option<Value>,
+}
+
+#[derive(Clone, Debug, Default, Serialize)]
+pub(crate) struct Controllers {
+    #[serde(rename = "FuchsiaDevice", skip_serializing_if = "Vec::is_empty")]
+    pub fuchsia_devices: Vec<Fuchsia>,
+    #[serde(rename = "AccessPoint", skip_serializing_if = "Vec::is_empty")]
+    pub access_points: Vec<AccessPoint>,
+    #[serde(rename = "Attenuator", skip_serializing_if = "Vec::is_empty")]
+    pub attenuators: Vec<Attenuator>,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Vec::is_empty")]
+    pub pdus: Vec<Pdu>,
+    #[serde(rename = "IPerfServer", skip_serializing_if = "Vec::is_empty")]
+    pub iperf_servers: Vec<IPerfServer>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// A Fuchsia device for use with antlion as defined by [fuchsia_device.py].
+///
+/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/fuchsia_device.py
+pub(crate) struct Fuchsia {
+    pub mdns_name: String,
+    pub ip: IpAddr,
+    pub take_bug_report_on_fail: bool,
+    pub ssh_binary_path: PathBuf,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ssh_config: Option<PathBuf>,
+    pub ffx_binary_path: PathBuf,
+    pub ssh_priv_key: PathBuf,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
+    pub pdu_device: Option<PduRef>,
+    pub hard_reboot_on_fail: bool,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+/// Reference to a PDU device. Used to specify which port the attached device
+/// maps to on the PDU.
+pub(crate) struct PduRef {
+    #[serde(default = "default_pdu_device")]
+    pub device: String,
+    #[serde(rename(serialize = "host"))]
+    pub ip: IpAddr,
+    pub port: u8,
+}
+
+fn default_pdu_device() -> String {
+    "synaccess.np02b".to_string()
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Declares an access point for use with antlion as defined by [access_point.py].
+///
+/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/access_point.py
+pub(crate) struct AccessPoint {
+    pub wan_interface: String,
+    pub ssh_config: SshConfig,
+    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
+    pub pdu_device: Option<PduRef>,
+    #[serde(rename = "Attenuator", skip_serializing_if = "Option::is_none")]
+    pub attenuators: Option<Vec<AttenuatorRef>>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+pub(crate) struct SshConfig {
+    pub ssh_binary_path: PathBuf,
+    pub host: IpAddr,
+    pub user: String,
+    pub identity_file: PathBuf,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Reference to an attenuator device. Used to specify which ports the attached
+/// devices' channels map to on the attenuator.
+pub(crate) struct AttenuatorRef {
+    #[serde(rename = "Address")]
+    pub address: IpAddr,
+    #[serde(rename = "attenuator_ports_wifi_2g")]
+    pub ports_2g: Vec<u8>,
+    #[serde(rename = "attenuator_ports_wifi_5g")]
+    pub ports_5g: Vec<u8>,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+#[serde(rename_all = "PascalCase")]
+/// Declares an attenuator for use with antlion as defined by [attenuator.py].
+///
+/// [attenuator.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/attenuator.py
+pub(crate) struct Attenuator {
+    pub model: String,
+    pub instrument_count: u8,
+    pub address: IpAddr,
+    pub protocol: String,
+    pub port: u16,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+/// Declares a power distribution unit for use with antlion as defined by [pdu.py].
+///
+/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/pdu.py
+pub(crate) struct Pdu {
+    pub device: String,
+    pub host: IpAddr,
+}
+
+#[derive(Clone, Debug, Serialize)]
+/// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py].
+///
+/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/iperf_server.py
+pub(crate) struct IPerfServer {
+    pub ssh_config: SshConfig,
+    pub port: u16,
+    pub test_interface: String,
+    pub use_killall: bool,
+}
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs
new file mode 100644
index 0000000..ceff26e
--- /dev/null
+++ b/runner/src/driver/infra.rs
@@ -0,0 +1,898 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config::PduRef;
+use crate::config::{self, Config};
+use crate::driver::Driver;
+use crate::env::Environment;
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use itertools::Itertools;
+use serde::Deserialize;
+use serde_yaml::Value;
+use thiserror::Error;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+const ENV_OUT_DIR: &'static str = "FUCHSIA_TEST_OUTDIR";
+const ENV_TESTBED_CONFIG: &'static str = "FUCHSIA_TESTBED_CONFIG";
+const TEST_SUMMARY_FILE: &'static str = "test_summary.yaml";
+
+#[derive(Debug)]
+/// Driver for running antlion on emulated and hardware testbeds hosted by
+/// Fuchsia infrastructure.
+pub(crate) struct InfraDriver {
+    output_dir: PathBuf,
+    config: Config,
+}
+
+#[derive(Error, Debug)]
+pub(crate) enum InfraDriverError {
+    #[error("infra environment not detected, \"{0}\" environment variable not present")]
+    NotDetected(String),
+    #[error(transparent)]
+    Config(#[from] ConfigError),
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+#[derive(Error, Debug)]
+pub(crate) enum ConfigError {
+    #[error("ip {ip} in use by several devices")]
+    DuplicateIp { ip: IpAddr },
+    #[error("ip {ip} port {port} in use by several devices")]
+    DuplicatePort { ip: IpAddr, port: u8 },
+}
+
+impl InfraDriver {
+    /// Detect an InfraDriver. Returns `InfraDriverError::NotDetected` if the
+    /// required environment variables are not present.
+    pub fn new<E: Environment>(
+        env: E,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+    ) -> Result<Self, InfraDriverError> {
+        let config_path = match env.var(ENV_TESTBED_CONFIG) {
+            Ok(p) => PathBuf::from(p),
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_TESTBED_CONFIG.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_TESTBED_CONFIG}\" {e}"
+                )))
+            }
+        };
+        let config = fs::read_to_string(&config_path)
+            .with_context(|| format!("Failed to read \"{}\"", config_path.display()))?;
+        let targets: Vec<InfraTarget> = serde_json::from_str(&config)
+            .with_context(|| format!("Failed to parse into InfraTarget: \"{config}\""))?;
+        if targets.len() == 0 {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected at least one target declared in \"{}\"",
+                config_path.display()
+            )));
+        }
+
+        let output_path = match env.var(ENV_OUT_DIR) {
+            Ok(p) => p,
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_OUT_DIR.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_OUT_DIR}\" {e}"
+                )))
+            }
+        };
+        let output_dir = PathBuf::from(output_path);
+        if !fs::metadata(&output_dir).context("Failed to stat the output directory")?.is_dir() {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected a directory but found a file at \"{}\"",
+                output_dir.display()
+            )));
+        }
+
+        Ok(InfraDriver {
+            output_dir: output_dir.clone(),
+            config: InfraDriver::parse_targets(targets, ssh_binary, ffx_binary, output_dir)?,
+        })
+    }
+
+    fn parse_targets(
+        targets: Vec<InfraTarget>,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+        output_dir: PathBuf,
+    ) -> Result<Config, InfraDriverError> {
+        let mut fuchsia_devices: Vec<config::Fuchsia> = vec![];
+        let mut access_points: Vec<config::AccessPoint> = vec![];
+        let mut attenuators: HashMap<IpAddr, config::Attenuator> = HashMap::new();
+        let mut pdus: HashMap<IpAddr, config::Pdu> = HashMap::new();
+        let mut iperf_servers: Vec<config::IPerfServer> = vec![];
+        let mut test_params: Option<Value> = None;
+
+        let mut used_ips: HashSet<IpAddr> = HashSet::new();
+        let mut used_ports: HashMap<IpAddr, HashSet<u8>> = HashMap::new();
+
+        let mut register_ip = |ip: IpAddr| -> Result<(), InfraDriverError> {
+            if !used_ips.insert(ip.clone()) {
+                return Err(ConfigError::DuplicateIp { ip }.into());
+            }
+            Ok(())
+        };
+
+        let mut register_port = |ip: IpAddr, port: u8| -> Result<(), InfraDriverError> {
+            match used_ports.get_mut(&ip) {
+                Some(ports) => {
+                    if !ports.insert(port) {
+                        return Err(ConfigError::DuplicatePort { ip, port }.into());
+                    }
+                }
+                None => {
+                    if used_ports.insert(ip, HashSet::from([port])).is_some() {
+                        return Err(InfraDriverError::Other(anyhow!(
+                            "Used ports set was unexpectedly modified by concurrent use",
+                        )));
+                    }
+                }
+            };
+            Ok(())
+        };
+
+        let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> {
+            if let Some(PduRef { device, ip, port }) = p {
+                register_port(ip.clone(), port)?;
+                let new = config::Pdu { device: device.clone(), host: ip.clone() };
+                if let Some(old) = pdus.insert(ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        let mut register_attenuator = |a: Option<AttenuatorRef>| -> Result<(), InfraDriverError> {
+            if let Some(a) = a {
+                let new = config::Attenuator {
+                    model: "minicircuits".to_string(),
+                    instrument_count: 4,
+                    address: a.ip.clone(),
+                    protocol: "http".to_string(),
+                    port: 80,
+                };
+                if let Some(old) = attenuators.insert(a.ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip: a.ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        let mut merge_test_params = |p: Option<Value>| {
+            match (test_params.as_mut(), p) {
+                (None, Some(new)) => test_params = Some(new),
+                (Some(existing), Some(new)) => yaml::merge(existing, new),
+                (_, None) => {}
+            };
+        };
+
+        for target in targets {
+            match target {
+                InfraTarget::FuchsiaDevice { nodename, ipv4, ipv6, ssh_key, pdu, test_params } => {
+                    let ip: IpAddr = if !ipv4.is_empty() {
+                        ipv4.parse().context("Invalid IPv4 address")
+                    } else if !ipv6.is_empty() {
+                        ipv6.parse().context("Invalid IPv6 address")
+                    } else {
+                        Err(anyhow!("IP address not specified"))
+                    }?;
+
+                    fuchsia_devices.push(config::Fuchsia {
+                        mdns_name: nodename.clone(),
+                        ip: ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: ffx_binary.clone(),
+                        ssh_priv_key: ssh_key.clone(),
+                        pdu_device: pdu.clone(),
+                        hard_reboot_on_fail: true,
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    merge_test_params(test_params);
+                }
+                InfraTarget::AccessPoint { ip, attenuator, pdu, ssh_key } => {
+                    access_points.push(config::AccessPoint {
+                        wan_interface: "eth0".to_string(),
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: "root".to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        pdu_device: pdu.clone(),
+                        attenuators: attenuator.as_ref().map(|a| {
+                            vec![config::AttenuatorRef {
+                                address: a.ip.clone(),
+                                ports_2g: vec![1, 2, 3],
+                                ports_5g: vec![1, 2, 3],
+                            }]
+                        }),
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    register_attenuator(attenuator)?;
+                }
+                InfraTarget::IPerfServer { ip, user, test_interface, pdu, ssh_key } => {
+                    iperf_servers.push(config::IPerfServer {
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: user.to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        port: 5201,
+                        test_interface: test_interface.clone(),
+                        use_killall: true,
+                    });
+
+                    register_ip(ip.clone())?;
+                    register_pdu(pdu)?;
+                }
+            };
+        }
+
+        Ok(Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: fuchsia_devices,
+                    access_points: access_points,
+                    attenuators: attenuators
+                        .into_values()
+                        .sorted_by_key(|a| a.address.clone())
+                        .collect(),
+                    pdus: pdus.into_values().sorted_by_key(|p| p.host.clone()).collect(),
+                    iperf_servers: iperf_servers,
+                },
+                test_params,
+            }],
+            mobly_params: config::MoblyParams { log_path: output_dir },
+        })
+    }
+}
+
+impl Driver for InfraDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> Config {
+        self.config.clone()
+    }
+    fn teardown(&self) -> Result<()> {
+        let results_path =
+            self.output_dir.join(TESTBED_NAME).join("latest").join(TEST_SUMMARY_FILE);
+        match fs::File::open(&results_path) {
+            Ok(mut results) => {
+                println!("\nTest results from {}\n", results_path.display());
+                println!("[=====MOBLY RESULTS=====]");
+                std::io::copy(&mut results, &mut std::io::stdout())
+                    .context("Failed to copy results to stdout")?;
+            }
+            Err(e) => eprintln!("Failed to open \"{}\": {}", results_path.display(), e),
+        };
+
+        // Remove any symlinks from the output directory; this causes errors
+        // while uploading to CAS.
+        //
+        // TODO: Remove when the fix is released and supported on Swarming bots
+        // https://github.com/bazelbuild/remote-apis-sdks/pull/229.
+        remove_symlinks(self.output_dir.clone())?;
+
+        Ok(())
+    }
+}
+
+fn remove_symlinks<P: AsRef<Path>>(path: P) -> Result<()> {
+    let meta = fs::symlink_metadata(path.as_ref())?;
+    if meta.is_symlink() {
+        fs::remove_file(path)?;
+    } else if meta.is_dir() {
+        for entry in fs::read_dir(path)? {
+            remove_symlinks(entry?.path())?;
+        }
+    }
+    Ok(())
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+/// Schema used to communicate target information from the test environment set
+/// up by botanist.
+///
+/// See https://cs.opensource.google/fuchsia/fuchsia/+/main:tools/botanist/README.md
+enum InfraTarget {
+    FuchsiaDevice {
+        nodename: String,
+        ipv4: String,
+        ipv6: String,
+        ssh_key: PathBuf,
+        pdu: Option<PduRef>,
+        test_params: Option<Value>,
+    },
+    AccessPoint {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        attenuator: Option<AttenuatorRef>,
+        pdu: Option<PduRef>,
+    },
+    IPerfServer {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        #[serde(default = "default_iperf_user")]
+        user: String,
+        test_interface: String,
+        pdu: Option<PduRef>,
+    },
+}
+
+fn default_iperf_user() -> String {
+    "pi".to_string()
+}
+
+#[derive(Clone, Debug, Deserialize)]
+struct AttenuatorRef {
+    ip: IpAddr,
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::run;
+    use crate::runner::Runner;
+    use crate::{env::Environment, runner::ExitStatus};
+
+    use std::ffi::OsStr;
+
+    use assert_matches::assert_matches;
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use serde_json::json;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+
+    #[derive(Default)]
+    struct MockRunner {
+        out_dir: PathBuf,
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl MockRunner {
+        fn new(out_dir: PathBuf) -> Self {
+            Self { out_dir, ..Default::default() }
+        }
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+
+            let antlion_out = self.out_dir.join(TESTBED_NAME).join("latest");
+            fs::create_dir_all(&antlion_out)
+                .context("Failed to create antlion output directory")?;
+            fs::write(antlion_out.join(TEST_SUMMARY_FILE), "")
+                .context("Failed to write test_summary.yaml")?;
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    struct MockEnvironment {
+        config: Option<PathBuf>,
+        out_dir: Option<PathBuf>,
+    }
+    impl Environment for MockEnvironment {
+        fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, std::env::VarError> {
+            if key.as_ref() == ENV_TESTBED_CONFIG {
+                self.config
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else if key.as_ref() == ENV_OUT_DIR {
+                self.out_dir
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else {
+                Err(std::env::VarError::NotPresent)
+            }
+        }
+    }
+
+    #[test]
+    fn infra_not_detected() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(_)));
+    }
+
+    #[test]
+    fn infra_not_detected_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG);
+    }
+
+    #[test]
+    fn infra_not_detected_out_dir() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env =
+            MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR);
+    }
+
+    #[test]
+    fn infra_invalid_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(testbed_config.as_file(), &json!({ "foo": "bar" })).unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got, Err(_));
+    }
+
+    #[test]
+    fn infra() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "test_params": {
+                    "sl4f_sanity_test_params": {
+                        "can_overwrite": false,
+                        "from_original": true,
+                    }
+                }
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        let params = "
+            sl4f_sanity_test_params:
+                merged_with: true
+                can_overwrite: true
+        ";
+        let params = serde_yaml::from_str(params).unwrap();
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+          TestParams:
+            sl4f_sanity_test_params:
+              can_overwrite: true
+              from_original: true
+              merged_with: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_with_auxiliary_devices() {
+        const FUCHSIA_PDU_IP: &'static str = "192.168.42.14";
+        const FUCHSIA_PDU_PORT: u8 = 1;
+        const AP_IP: &'static str = "192.168.42.11";
+        const AP_AND_IPERF_PDU_IP: &'static str = "192.168.42.13";
+        const AP_PDU_PORT: u8 = 1;
+        const ATTENUATOR_IP: &'static str = "192.168.42.15";
+        const IPERF_IP: &'static str = "192.168.42.12";
+        const IPERF_USER: &'static str = "alice";
+        const IPERF_PDU_PORT: u8 = 2;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": FUCHSIA_PDU_IP,
+                    "port": FUCHSIA_PDU_PORT,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": AP_IP,
+                "ssh_key": ssh_key.path(),
+                "attenuator": {
+                    "ip": ATTENUATOR_IP,
+                },
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": AP_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }, {
+                "type": "IPerfServer",
+                "ip": IPERF_IP,
+                "ssh_key": ssh_key.path(),
+                "user": IPERF_USER,
+                "test_interface": "eth0",
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": IPERF_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              PduDevice:
+                device: synaccess.np02b
+                host: {FUCHSIA_PDU_IP}
+                port: {FUCHSIA_PDU_PORT}
+              hard_reboot_on_fail: true
+            AccessPoint:
+            - wan_interface: eth0
+              ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {AP_IP}
+                user: root
+                identity_file: {ssh_key_path}
+              PduDevice:
+                device: fancy-pdu
+                host: {AP_AND_IPERF_PDU_IP}
+                port: {AP_PDU_PORT}
+              Attenuator:
+              - Address: {ATTENUATOR_IP}
+                attenuator_ports_wifi_2g:
+                - 1
+                - 2
+                - 3
+                attenuator_ports_wifi_5g:
+                - 1
+                - 2
+                - 3
+            Attenuator:
+            - Model: minicircuits
+              InstrumentCount: 4
+              Address: {ATTENUATOR_IP}
+              Protocol: http
+              Port: 80
+            PduDevice:
+            - device: fancy-pdu
+              host: {AP_AND_IPERF_PDU_IP}
+            - device: synaccess.np02b
+              host: {FUCHSIA_PDU_IP}
+            IPerfServer:
+            - ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {IPERF_IP}
+                user: {IPERF_USER}
+                identity_file: {ssh_key_path}
+              port: 5201
+              test_interface: eth0
+              use_killall: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn infra_duplicate_port_pdu() {
+        let pdu_ip: IpAddr = "192.168.42.13".parse().unwrap();
+        let pdu_port = 1;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port }))
+                if ip == pdu_ip && port == pdu_port
+        );
+    }
+
+    #[test]
+    fn infra_duplicate_ip_pdu() {
+        let duplicate_ip: IpAddr = "192.168.42.13".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 1,
+                    "device": "A",
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 2,
+                    "device": "B",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        assert_matches!(
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()),
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    #[test]
+    fn infra_duplicate_ip_devices() {
+        let duplicate_ip: IpAddr = "192.168.42.11".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": duplicate_ip,
+                "ipv6": "",
+                "ssh_key": ssh_key.path(),
+            }, {
+                "type": "AccessPoint",
+                "ip": duplicate_ip,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    #[test]
+    fn remove_symlinks_works() {
+        const SYMLINK_FILE: &'static str = "latest";
+
+        let out_dir = TempDir::new().unwrap();
+        let test_file = NamedTempFile::new_in(&out_dir).unwrap();
+        let symlink_path = out_dir.path().join(SYMLINK_FILE);
+
+        #[cfg(unix)]
+        std::os::unix::fs::symlink(&test_file, &symlink_path).unwrap();
+        #[cfg(windows)]
+        std::os::windows::fs::symlink_file(&test_file, &symlink_path).unwrap();
+
+        assert_matches!(remove_symlinks(out_dir.path()), Ok(()));
+        assert_matches!(fs::symlink_metadata(symlink_path), Err(e) if e.kind() == std::io::ErrorKind::NotFound);
+        assert_matches!(fs::symlink_metadata(test_file), Ok(meta) if meta.is_file());
+    }
+}
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs
new file mode 100644
index 0000000..983a6a7
--- /dev/null
+++ b/runner/src/driver/local.rs
@@ -0,0 +1,287 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config;
+use crate::driver::Driver;
+use crate::finder::{Answer, Finder};
+use crate::net::IpAddr;
+
+use std::path::{Path, PathBuf};
+
+use anyhow::{ensure, Context, Result};
+use home::home_dir;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+
+/// Driver for running antlion locally on an emulated or hardware testbed with
+/// optional mDNS discovery when a DHCP server is not available. This is useful
+/// for testing changes locally in a development environment.
+pub(crate) struct LocalDriver {
+    target: LocalTarget,
+    output_dir: PathBuf,
+    ssh_binary: PathBuf,
+    ffx_binary: PathBuf,
+}
+
+impl LocalDriver {
+    pub fn new<F>(
+        device: Option<String>,
+        ssh_binary: PathBuf,
+        ssh_key: Option<PathBuf>,
+        ffx_binary: PathBuf,
+        out_dir: Option<PathBuf>,
+    ) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let output_dir = match out_dir {
+            Some(p) => Ok(p),
+            None => std::env::current_dir().context("Failed to get current working directory"),
+        }?;
+        Ok(Self {
+            target: LocalTarget::new::<F>(device, ssh_key)?,
+            output_dir,
+            ssh_binary,
+            ffx_binary,
+        })
+    }
+}
+
+impl Driver for LocalDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> config::Config {
+        config::Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: vec![config::Fuchsia {
+                        mdns_name: self.target.name.clone(),
+                        ip: self.target.ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: self.ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: self.ffx_binary.clone(),
+                        ssh_priv_key: self.target.ssh_key.clone(),
+                        pdu_device: None,
+                        hard_reboot_on_fail: true,
+                    }],
+                    ..Default::default()
+                },
+                test_params: None,
+            }],
+            mobly_params: config::MoblyParams { log_path: self.output_dir.clone() },
+        }
+    }
+    fn teardown(&self) -> Result<()> {
+        println!(
+            "\nView full antlion logs at {}",
+            self.output_dir.join(TESTBED_NAME).join("latest").display()
+        );
+        Ok(())
+    }
+}
+
+/// LocalTarget performs best-effort discovery of target information from
+/// standard Fuchsia environment variables.
+struct LocalTarget {
+    name: String,
+    ip: IpAddr,
+    ssh_key: PathBuf,
+}
+
+impl LocalTarget {
+    fn new<F>(device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self>
+    where
+        F: Finder,
+    {
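+        // Resolve the device name by precedence: the --device flag, then the
+        // default device set with `fx set-device` (read from
+        // $FUCHSIA_DIR/out/default.device), then the first device discovered
+        // over mDNS.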
+        let device_name = device.or_else(|| match std::env::var("FUCHSIA_DIR") {
+            Ok(dir) => match std::fs::read_to_string(format!("{dir}/out/default.device")) {
+                Ok(name) => Some(name.trim().to_string()),
+                Err(_) => {
+                    println!("A default device using \"fx set-device\" has not been set");
+                    println!("Using the first Fuchsia device discovered via mDNS");
+                    None
+                }
+            },
+            Err(_) => {
+                println!("Neither --device nor FUCHSIA_DIR has been set");
+                println!("Using the first Fuchsia device discovered via mDNS");
+                None
+            }
+        });
+
+        let Answer { name, ip } = F::find_device(device_name)?;
+
+        // TODO: Move this validation out to Args
+        let ssh_key = ssh_key
+            .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519").to_path_buf()))
+            .context("Failed to detect the private Fuchsia SSH key")?;
+
+        ensure!(
+            ssh_key.try_exists().with_context(|| format!(
+                "Failed to check existence of SSH key \"{}\"",
+                ssh_key.display()
+            ))?,
+            "Cannot find SSH key \"{}\"",
+            ssh_key.display()
+        );
+
+        Ok(LocalTarget { name, ip, ssh_key })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::finder::{Answer, Finder};
+    use crate::run;
+    use crate::runner::{ExitStatus, Runner};
+
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+    const FUCHSIA_IP: &'static str = "fe80::1";
+    const SCOPE_ID: u32 = 2;
+
+    struct MockFinder;
+    impl Finder for MockFinder {
+        fn find_device(_: Option<String>) -> Result<Answer> {
+            Ok(Answer {
+                name: FUCHSIA_NAME.to_string(),
+                ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID)),
+            })
+        }
+    }
+
+    #[derive(Default)]
+    struct MockRunner {
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    #[test]
+    fn local_invalid_ssh_key() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        assert!(LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(PathBuf::new()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .is_err());
+    }
+
+    #[test]
+    fn local() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .unwrap();
+
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display();
+        let ssh_key_path = ssh_key.path().display();
+        let ffx_path = ffx.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn local_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(out_dir.path().to_path_buf()),
+        )
+        .unwrap();
+
+        let params_yaml = "
+        sl4f_sanity_test_params:
+            foo: bar
+        ";
+        let params = serde_yaml::from_str(params_yaml).unwrap();
+
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+          TestParams:
+            sl4f_sanity_test_params:
+              foo: bar
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+}
diff --git a/runner/src/driver/mod.rs b/runner/src/driver/mod.rs
new file mode 100644
index 0000000..35de41f
--- /dev/null
+++ b/runner/src/driver/mod.rs
@@ -0,0 +1,24 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+pub(crate) mod infra;
+pub(crate) mod local;
+
+use crate::config::Config;
+
+use std::path::Path;
+
+use anyhow::Result;
+
+/// A Driver provides the environment-specific information needed to run an
+/// antlion test.
+pub(crate) trait Driver {
+    /// Path to output directory for test artifacts.
+    fn output_path(&self) -> &Path;
+    /// Antlion config for use during test.
+    fn config(&self) -> Config;
+    /// Additional logic to run after all tests run, regardless of tests passing
+    /// or failing.
+    fn teardown(&self) -> Result<()>;
+}
diff --git a/runner/src/env.rs b/runner/src/env.rs
new file mode 100644
index 0000000..ede8b74
--- /dev/null
+++ b/runner/src/env.rs
@@ -0,0 +1,25 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::env::VarError;
+use std::ffi::OsStr;
+
+/// Inspection of the process's environment.
+pub(crate) trait Environment {
+    /// Fetches the environment variable `key` from the current process.
+    ///
+    /// See [std::env::var] for details.
+    ///
+    /// [std::env::var]: https://doc.rust-lang.org/std/env/fn.var.html
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError>;
+}
+
+/// Query the local process's environment.
+pub(crate) struct LocalEnvironment;
+
+impl Environment for LocalEnvironment {
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError> {
+        std::env::var(key)
+    }
+}
diff --git a/runner/src/finder.rs b/runner/src/finder.rs
new file mode 100644
index 0000000..c381b36
--- /dev/null
+++ b/runner/src/finder.rs
@@ -0,0 +1,200 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+
+use std::io;
+use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket};
+use std::str;
+use std::time::{Duration, Instant};
+
+use anyhow::{bail, Context, Result};
+use mdns::protocol as dns;
+use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface};
+use packet::{InnerPacketBuilder, ParseBuffer};
+use socket2::{Domain, Protocol, Socket, Type};
+
+const FUCHSIA_DOMAIN: &str = "_fuchsia._udp.local";
+const MDNS_MCAST_V6: Ipv6Addr = Ipv6Addr::new(0xff02, 0, 0, 0, 0, 0, 0, 0x00fb);
+const MDNS_PORT: u16 = 5353;
+const MDNS_TIMEOUT: Duration = Duration::from_secs(10);
+
+lazy_static::lazy_static! {
+    static ref MDNS_QUERY: &'static [u8] = construct_query_buf(FUCHSIA_DOMAIN);
+}
+
+/// Find Fuchsia devices.
+pub(crate) trait Finder {
+    /// Find a Fuchsia device, preferring `device_name` if specified.
+    fn find_device(device_name: Option<String>) -> Result<Answer>;
+}
+
+/// Answer from a Finder.
+pub(crate) struct Answer {
+    /// Name of the Fuchsia device.
+    pub name: String,
+    /// IP address of the Fuchsia device.
+    pub ip: IpAddr,
+}
+
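+/// [Finder] implementation that discovers Fuchsia devices over multicast DNS
+/// (mDNS).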
+pub(crate) struct MulticastDns;
+
+impl Finder for MulticastDns {
+    /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the
+    /// first device will be used.
+    fn find_device(device_name: Option<String>) -> Result<Answer> {
+        let interfaces =
+            get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?;
+        let interface_names =
+            interfaces.iter().map(|i| i.name.clone()).collect::<Vec<String>>().join(", ");
+        if let Some(ref d) = device_name {
+            println!("Performing mDNS discovery for {d} on interfaces: {interface_names}");
+        } else {
+            println!("Performing mDNS discovery on interfaces: {interface_names}");
+        }
+
+        let socket = create_socket(interfaces.iter()).context("Failed to create mDNS socket")?;
+
+        // TODO(http://b/264936590): Remove the race condition where the Fuchsia
+        // device can send its answer before this socket starts listening. Add an
+        // async runtime and concurrently listen for answers while sending queries.
+        send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?;
+        let answer = listen_for_answers(socket, device_name)?;
+
+        println!("Device {} found at {}", answer.name, answer.ip);
+        Ok(answer)
+    }
+}
+
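+/// Build a one-question mDNS PTR query for `service`, leaking the serialized
+/// message so it can be reused for the lifetime of the process.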
+fn construct_query_buf(service: &str) -> &'static [u8] {
+    let question = dns::QuestionBuilder::new(
+        dns::DomainBuilder::from_str(service).unwrap(),
+        dns::Type::Ptr,
+        dns::Class::In,
+        true,
+    );
+
+    let mut message = dns::MessageBuilder::new(0, true);
+    message.add_question(question);
+
+    let mut buf = vec![0; message.bytes_len()];
+    message.serialize(buf.as_mut_slice());
+    Box::leak(buf.into_boxed_slice())
+}
+
+/// Create a socket for both sending and listening on all multicast-capable
+/// interfaces.
+fn create_socket<'a>(interfaces: impl Iterator<Item = &'a McastInterface>) -> Result<Socket> {
+    let socket = Socket::new(Domain::IPV6, Type::DGRAM, Some(Protocol::UDP))?;
+    let read_timeout = Duration::from_millis(100);
+    socket
+        .set_read_timeout(Some(read_timeout))
+        .with_context(|| format!("Failed to set SO_RCVTIMEO to {}ms", read_timeout.as_millis()))?;
+    socket.set_only_v6(true).context("Failed to set IPV6_V6ONLY")?;
+    socket.set_reuse_address(true).context("Failed to set SO_REUSEADDR")?;
+    socket.set_reuse_port(true).context("Failed to set SO_REUSEPORT")?;
+
+    for interface in interfaces {
+        // Listen on all multicast-enabled interfaces
+        match interface.id() {
+            Ok(id) => match socket.join_multicast_v6(&MDNS_MCAST_V6, id) {
+                Ok(()) => {}
+                Err(e) => eprintln!("Failed to join mDNS multicast group on interface {id}: {e}"),
+            },
+            Err(e) => eprintln!("Failed to listen on interface {}: {}", interface.name, e),
+        }
+    }
+
+    socket
+        .bind(&SocketAddrV6::new(Ipv6Addr::UNSPECIFIED, 0, 0, 0).into())
+        .context("Failed to bind to the unspecified IPv6 address")?;
+
+    Ok(socket)
+}
+
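+/// Send the Fuchsia mDNS query out of each multicast-capable interface,
+/// skipping loopback and non-local addresses.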
+fn send_queries<'a>(
+    socket: &Socket,
+    interfaces: impl Iterator<Item = &'a McastInterface>,
+) -> Result<()> {
+    let to_addr = SocketAddrV6::new(MDNS_MCAST_V6, MDNS_PORT, 0, 0).into();
+
+    for interface in interfaces {
+        let id = interface
+            .id()
+            .with_context(|| format!("Failed to get interface ID for {}", interface.name))?;
+        socket
+            .set_multicast_if_v6(id)
+            .with_context(|| format!("Failed to set multicast interface for {}", interface.name))?;
+        for addr in &interface.addrs {
+            if let SocketAddr::V6(addr_v6) = addr {
+                if !addr.ip().is_local_addr() || addr.ip().is_loopback() {
+                    continue;
+                }
+                if let Err(e) = socket.send_to(&MDNS_QUERY, &to_addr) {
+                    eprintln!(
+                        "Failed to send mDNS query out {} via {}: {e}",
+                        interface.name,
+                        addr_v6.ip()
+                    );
+                    continue;
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
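+/// Listen for mDNS answers until `MDNS_TIMEOUT` elapses, returning the first
+/// Fuchsia device whose name matches `device_name`, or the first Fuchsia
+/// device found when no name was requested.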
+fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<Answer> {
+    let s: UdpSocket = socket.into();
+    let mut buf = [0; 1500];
+
+    let end = Instant::now() + MDNS_TIMEOUT;
+    while Instant::now() < end {
+        match s.recv_from(&mut buf) {
+            Ok((packet_bytes, src_sock_addr)) => {
+                if !src_sock_addr.ip().is_local_addr() {
+                    continue;
+                }
+
+                let mut packet_buf = &mut buf[..packet_bytes];
+                match packet_buf.parse::<dns::Message<_>>() {
+                    Ok(message) => {
+                        if !message.answers.iter().any(|a| a.domain == FUCHSIA_DOMAIN) {
+                            continue;
+                        }
+                        for answer in message.additional {
+                            if let Some(std::net::IpAddr::V6(addr)) = answer.rdata.ip_addr() {
+                                if let SocketAddr::V6(src_v6) = src_sock_addr {
+                                    let name = answer
+                                        .domain
+                                        .to_string()
+                                        .trim_end_matches(".local")
+                                        .to_string();
+                                    let scope_id = src_v6.scope_id();
+
+                                    if let Some(ref device) = device_name {
+                                        if &name != device {
+                                            println!("Found irrelevant device {name} at {addr}%{scope_id}");
+                                            continue;
+                                        }
+                                    }
+
+                                    return Ok(Answer {
+                                        name,
+                                        ip: IpAddr::V6(addr, Some(scope_id)),
+                                    });
+                                }
+                            }
+                        }
+                    }
+                    Err(err) => eprintln!("Failed to parse mDNS packet: {err:?}"),
+                }
+            }
+            Err(err) if err.kind() == io::ErrorKind::WouldBlock => {}
+            Err(err) => return Err(err.into()),
+        }
+    }
+
+    bail!("device {device_name:?} not found")
+}
diff --git a/runner/src/main.rs b/runner/src/main.rs
new file mode 100644
index 0000000..4252694
--- /dev/null
+++ b/runner/src/main.rs
@@ -0,0 +1,149 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+mod config;
+mod driver;
+mod env;
+mod finder;
+mod net;
+mod runner;
+mod yaml;
+
+use crate::driver::infra::{InfraDriver, InfraDriverError};
+use crate::runner::ExitStatus;
+
+use std::fs::File;
+use std::path::PathBuf;
+use std::{fs, process::ExitCode};
+
+use anyhow::{Context, Result};
+use argh::FromArgs;
+use serde_yaml;
+use serde_yaml::Value;
+
+#[derive(FromArgs)]
+/// antlion runner with config generation
+struct Args {
+    /// name of the Fuchsia device to use for testing; defaults to using mDNS
+    /// discovery
+    #[argh(option)]
+    device: Option<String>,
+
+    /// path to the SSH binary used to communicate with all devices
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_binary: PathBuf,
+
+    /// path to the SSH private key used to communicate with Fuchsia; defaults
+    /// to ~/.ssh/fuchsia_ed25519
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_key: Option<PathBuf>,
+
+    /// path to the FFX binary used to communicate with Fuchsia
+    #[argh(option, from_str_fn(parse_file))]
+    ffx_binary: PathBuf,
+
+    /// path to the python interpreter binary (e.g. /bin/python3.9)
+    #[argh(option)]
+    python_bin: String,
+
+    /// path to the antlion zipapp, ending in .pyz
+    #[argh(option, from_str_fn(parse_file))]
+    antlion_pyz: PathBuf,
+
+    /// path to a directory for outputting artifacts; defaults to the current
+    /// working directory or FUCHSIA_TEST_OUTDIR
+    #[argh(option, from_str_fn(parse_directory))]
+    out_dir: Option<PathBuf>,
+
+    /// path to additional YAML config for this test; placed in the
+    /// "test_params" key in the antlion config
+    #[argh(option, from_str_fn(parse_file))]
+    test_params: Option<PathBuf>,
+}
+
+fn parse_file(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let _ = File::open(&path).map_err(|e| format!("Failed to open \"{s}\": {e}"))?;
+    Ok(path)
+}
+
+fn parse_directory(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let meta =
+        std::fs::metadata(&path).map_err(|e| format!("Failed to read metadata of \"{s}\": {e}"))?;
+    if meta.is_file() {
+        return Err(format!("Expected a directory but found a file at \"{s}\""));
+    }
+    Ok(path)
+}
+
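+/// Write the driver's antlion config (merged with any extra test params) to
+/// the output directory, run antlion against it, then tear down the driver.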
+fn run<R, D>(runner: R, driver: D, test_params: Option<Value>) -> Result<ExitCode>
+where
+    R: runner::Runner,
+    D: driver::Driver,
+{
+    let mut config = driver.config();
+    if let Some(params) = test_params {
+        config.merge_test_params(params);
+    }
+
+    let yaml =
+        serde_yaml::to_string(&config).context("Failed to convert antlion config to YAML")?;
+
+    let output_path = driver.output_path().to_path_buf();
+    let config_path = output_path.join("config.yaml");
+    println!("Writing {}", config_path.display());
+    println!("\n{yaml}\n");
+    fs::write(&config_path, yaml).context("Failed to write config to file")?;
+
+    let exit_code = runner.run(config_path).context("Failed to run antlion")?;
+    match exit_code {
+        ExitStatus::Ok => println!("Antlion successfully exited"),
+        ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
+        ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
+        ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
+    };
+    driver.teardown().context("Failed to teardown environment")?;
+    Ok(exit_code.into())
+}
+
+fn main() -> Result<ExitCode> {
+    let args: Args = argh::from_env();
+    let env = env::LocalEnvironment;
+    let runner =
+        runner::ProcessRunner { python_bin: args.python_bin, antlion_pyz: args.antlion_pyz };
+
+    let test_params = match args.test_params {
+        Some(path) => {
+            let text = fs::read_to_string(&path)
+                .with_context(|| format!("Failed to read file \"{}\"", path.display()))?;
+            let yaml = serde_yaml::from_str(&text)
+                .with_context(|| format!("Failed to parse \"{text}\" as YAML"))?;
+            Some(yaml)
+        }
+        None => None,
+    };
+
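+    // Prefer the infra driver when its environment variables are present;
+    // otherwise fall back to the local driver with mDNS discovery.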
+    match InfraDriver::new(env, args.ssh_binary.clone(), args.ffx_binary.clone()) {
+        Ok(env) => return run(runner, env, test_params),
+        Err(InfraDriverError::NotDetected(_)) => {}
+        Err(InfraDriverError::Config(e)) => {
+            return Err(anyhow::Error::from(e).context("Config validation"))
+        }
+        Err(InfraDriverError::Other(e)) => {
+            return Err(anyhow::Error::from(e).context("Unexpected infra driver error"))
+        }
+    };
+
+    let env = driver::local::LocalDriver::new::<finder::MulticastDns>(
+        args.device.clone(),
+        args.ssh_binary.clone(),
+        args.ssh_key.clone(),
+        args.ffx_binary.clone(),
+        args.out_dir.clone(),
+    )
+    .context("Failed to detect local environment")?;
+
+    run(runner, env, test_params)
+}
diff --git a/runner/src/net.rs b/runner/src/net.rs
new file mode 100644
index 0000000..70db2eb
--- /dev/null
+++ b/runner/src/net.rs
@@ -0,0 +1,231 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::fmt::{Debug, Display};
+use std::marker::PhantomData;
+use std::net::{Ipv4Addr, Ipv6Addr};
+
+use netext::IsLocalAddr;
+use nix::net::if_::if_nametoindex;
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+/// IP address with support for IPv6 scope identifiers as defined in RFC 4007.
+#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
+pub enum IpAddr {
+    /// An IPv4 address.
+    V4(Ipv4Addr),
+    /// An IPv6 address with optional scope identifier.
+    V6(Ipv6Addr, Option<u32>),
+}
+
+impl From<IpAddr> for std::net::IpAddr {
+    fn from(value: IpAddr) -> Self {
+        match value {
+            IpAddr::V4(ip) => std::net::IpAddr::from(ip),
+            IpAddr::V6(ip, _) => std::net::IpAddr::from(ip),
+        }
+    }
+}
+
+impl From<Ipv6Addr> for IpAddr {
+    fn from(value: Ipv6Addr) -> Self {
+        IpAddr::V6(value, None)
+    }
+}
+
+impl From<Ipv4Addr> for IpAddr {
+    fn from(value: Ipv4Addr) -> Self {
+        IpAddr::V4(value)
+    }
+}
+
+impl From<std::net::IpAddr> for IpAddr {
+    fn from(value: std::net::IpAddr) -> Self {
+        match value {
+            std::net::IpAddr::V4(ip) => IpAddr::from(ip),
+            std::net::IpAddr::V6(ip) => IpAddr::from(ip),
+        }
+    }
+}
+
+/// An error which can be returned when parsing an IP address with optional IPv6
+/// scope ID. See [`std::net::AddrParseError`].
+#[derive(Debug, Clone, PartialEq, Eq, Error)]
+pub enum AddrParseError {
+    #[error(transparent)]
+    IpInvalid(#[from] std::net::AddrParseError),
+    #[error("no interface found with name \"{0}\"")]
+    InterfaceNotFound(String),
+    /// Scope IDs are only supported for IPv6 link-local addresses as per RFC
+    /// 6874 Section 4.
+    #[error("only IPv6 link-local may include a scope ID")]
+    ScopeNotSupported,
+}
+
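+/// Parse addresses of the form `ip` or `ipv6%scope`, where `scope` is either a
+/// numeric scope ID or an interface name resolved with `if_nametoindex`. Scope
+/// IDs are only accepted for IPv6 link-local addresses.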
+impl std::str::FromStr for IpAddr {
+    type Err = AddrParseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let mut parts = s.splitn(2, '%');
+        let addr = parts.next().unwrap(); // first element is guaranteed
+        let ip = std::net::IpAddr::from_str(addr)?;
+        let scope = parts.next();
+        match (ip, scope) {
+            (std::net::IpAddr::V4(ip), None) => Ok(IpAddr::from(ip)),
+            (std::net::IpAddr::V4(_), Some(_)) => Err(AddrParseError::ScopeNotSupported),
+            (std::net::IpAddr::V6(ip), None) => Ok(IpAddr::V6(ip, None)),
+            (std::net::IpAddr::V6(ip), Some(scope)) => {
+                if !ip.is_link_local_addr() {
+                    return Err(AddrParseError::ScopeNotSupported);
+                }
+                if let Ok(index) = scope.parse::<u32>() {
+                    return Ok(IpAddr::V6(ip, Some(index)));
+                }
+                match if_nametoindex(scope) {
+                    Ok(index) => Ok(IpAddr::V6(ip, Some(index))),
+                    Err(_) => Err(AddrParseError::InterfaceNotFound(scope.to_string())),
+                }
+            }
+        }
+    }
+}
+
+impl Display for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            IpAddr::V4(ip) => Display::fmt(ip, f),
+            IpAddr::V6(ip, None) => Display::fmt(ip, f),
+            IpAddr::V6(ip, Some(scope)) => {
+                Display::fmt(ip, f)?;
+                write!(f, "%{}", scope)
+            }
+        }
+    }
+}
+
+impl Debug for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        Display::fmt(self, f)
+    }
+}
+
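+// Serialize to (and deserialize from) the `Display`/`FromStr` string form,
+// e.g. "fe80::1%2", which is the form written into the antlion config.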
+impl Serialize for IpAddr {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_str(self.to_string().as_str())
+    }
+}
+
+impl<'de> Deserialize<'de> for IpAddr {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        deserializer.deserialize_str(FromStrVisitor::new())
+    }
+}
+
+struct FromStrVisitor<T> {
+    ty: PhantomData<T>,
+}
+
+impl<T> FromStrVisitor<T> {
+    fn new() -> Self {
+        FromStrVisitor { ty: PhantomData }
+    }
+}
+
+impl<'de, T> serde::de::Visitor<'de> for FromStrVisitor<T>
+where
+    T: std::str::FromStr,
+    T::Err: std::fmt::Display,
+{
+    type Value = T;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        formatter.write_str("IP address")
+    }
+
+    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        s.parse().map_err(serde::de::Error::custom)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::{AddrParseError, IpAddr};
+    use assert_matches::assert_matches;
+
+    #[test]
+    fn parse_ip_invalid() {
+        assert_matches!("".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("192.168.1.".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("fe80:".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+    }
+
+    #[test]
+    fn parse_ipv4() {
+        assert_matches!(
+            "192.168.1.1".parse::<IpAddr>(),
+            Ok(IpAddr::V4(ip))
+                if ip == "192.168.1.1".parse::<std::net::Ipv4Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv4_with_scope() {
+        assert_matches!(
+            "192.168.1.1%1".parse::<IpAddr>(),
+            Err(AddrParseError::ScopeNotSupported)
+        );
+    }
+
+    #[test]
+    fn parse_ipv6() {
+        assert_matches!(
+            "fe80::1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, None))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_global_with_scope() {
+        assert_matches!("2001::1%1".parse::<IpAddr>(), Err(AddrParseError::ScopeNotSupported));
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope() {
+        assert_matches!(
+            "fe80::1%1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, Some(scope)))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+                && scope == 1
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope_interface_not_found() {
+        // An empty scope ID should trigger a failed lookup.
+        assert_matches!(
+            "fe80::1%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == ""
+        );
+
+        // The trailing '%' forces a failed lookup. At the time of writing, no
+        // OS supports this character as part of interface names.
+        assert_matches!(
+            "fe80::1%eth0%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == "eth0%"
+        );
+    }
+}
diff --git a/runner/src/runner.rs b/runner/src/runner.rs
new file mode 100644
index 0000000..c40e05d
--- /dev/null
+++ b/runner/src/runner.rs
@@ -0,0 +1,83 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#[cfg(unix)]
+use std::os::unix::process::ExitStatusExt;
+use std::process::Command;
+use std::{path::PathBuf, process::ExitCode};
+
+use anyhow::{Context, Result};
+use itertools::Itertools;
+
+/// Runner for dispatching antlion.
+pub(crate) trait Runner {
+    /// Run antlion with the provided config file.
+    fn run(&self, config: PathBuf) -> Result<ExitStatus>;
+}
+
+/// Executes antlion as a local process.
+pub(crate) struct ProcessRunner {
+    pub python_bin: String,
+    pub antlion_pyz: PathBuf,
+}
+
+impl Runner for ProcessRunner {
+    fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+        let args = [
+            &self.antlion_pyz.clone().into_os_string().into_string().unwrap(),
+            "--config",
+            &config.into_os_string().into_string().unwrap(),
+        ];
+
+        println!(
+            "Launching antlion to run: \"{} {}\"\n",
+            &self.python_bin,
+            args.iter().format(" "),
+        );
+
+        let status = Command::new(&self.python_bin)
+            .args(args)
+            .status()
+            .context("Failed to execute antlion")?;
+
+        Ok(ExitStatus::from(status))
+    }
+}
+
+/// Describes the result of a child process after it has terminated.
+pub(crate) enum ExitStatus {
+    /// Process terminated without error.
+    Ok,
+    /// Process terminated with a non-zero status code.
+    Err(i32),
+    /// Process was interrupted by a signal.
+    Interrupt(Option<i32>),
+}
+
+impl From<std::process::ExitStatus> for ExitStatus {
+    fn from(status: std::process::ExitStatus) -> Self {
+        // `code()` returns `None` when the process was terminated by a signal.
+        // Report the signal on Unix, where `ExitStatusExt::signal` is available.
+        match status.code() {
+            Some(0) => ExitStatus::Ok,
+            Some(code) => ExitStatus::Err(code),
+            #[cfg(unix)]
+            None => ExitStatus::Interrupt(status.signal()),
+            #[cfg(not(unix))]
+            None => ExitStatus::Interrupt(None),
+        }
+    }
+}
+
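+// Non-zero exit codes outside the range representable by `ExitCode` (0-255)
+// are reported as 1.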
+impl From<ExitStatus> for ExitCode {
+    fn from(status: ExitStatus) -> Self {
+        match status {
+            ExitStatus::Ok => ExitCode::SUCCESS,
+            ExitStatus::Err(code) => ExitCode::from(u8::try_from(code).unwrap_or(1)),
+            ExitStatus::Interrupt(_) => ExitCode::FAILURE,
+        }
+    }
+}
diff --git a/runner/src/yaml.rs b/runner/src/yaml.rs
new file mode 100644
index 0000000..ae972bf
--- /dev/null
+++ b/runner/src/yaml.rs
@@ -0,0 +1,95 @@
+use serde_yaml::Value;
+
+/// Merge `b` into `a`: mappings are merged recursively, sequences are
+/// appended, and all other values are overwritten by `b`.
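+///
+/// For example, merging `{a: [1], b: x}` into `{a: [2], c: y}` yields
+/// `{a: [2, 1], b: x, c: y}`.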
+pub fn merge(a: &mut Value, b: Value) {
+    match (a, b) {
+        (Value::Mapping(ref mut a), Value::Mapping(b)) => {
+            for (k, v) in b {
+                if !a.contains_key(&k) {
+                    a.insert(k, v);
+                } else {
+                    merge(&mut a[&k], v);
+                }
+            }
+        }
+        (Value::Sequence(ref mut a), Value::Sequence(ref mut b)) => {
+            a.append(b);
+        }
+        (a, b) => *a = b,
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_merge_mapping() {
+        let a = "
+            test_params:
+                name: a
+                who_called:
+                    was_a: true
+        ";
+        let mut a: Value = serde_yaml::from_str(a).unwrap();
+        let b = "
+            test_params:
+                name: b
+                who_called:
+                    was_b: true
+        ";
+        let b: Value = serde_yaml::from_str(b).unwrap();
+        merge(&mut a, b);
+        let want = "
+            test_params:
+                name: b
+                who_called:
+                    was_a: true
+                    was_b: true
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_append_arrays() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - b").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - b
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_append_arrays_allow_duplicates() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - a").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - a
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    #[test]
+    fn test_merge_overwrite_from_null() {
+        let mut a: Value = Value::Null;
+        let b: Value = serde_yaml::from_str("true").unwrap();
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+
+    #[test]
+    fn test_merge_overwrite_with_null() {
+        let mut a: Value = serde_yaml::from_str("true").unwrap();
+        let b: Value = Value::Null;
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+}
diff --git a/setup.py b/setup.py
index 30f198d..28f080f 100644
--- a/setup.py
+++ b/setup.py
@@ -20,33 +20,32 @@
     "mobly==1.12.0",
     "pyyaml>=5.1",
     "tenacity~=8.0",
+    # TODO(b/240443856): Remove these dependencies once antlion runs in
+    # Fuchsia's LUCI infrastructure. These are needed for flashing and using
+    # mDNS discovery, which are unnecessary in the future infrastructure.
+    "usbinfo",
+    "psutil",
+    "zeroconf",
 ]
 
-setup(name='antlion',
-      version='0.2.0',
-      description = "Host-driven, hardware-agnostic Fuchsia connectivity tests",
-      license='Apache-2.0',
-      packages=find_packages(
-          where='src',
-      ),
-      package_dir={"": "src"},
-      include_package_data=True,
-      tests_require=[],
-      install_requires=install_requires,
-      extras_require={
-          'html_graphing': ['bokeh'],
-          'dev': ['shiv', 'toml', 'yapf'],
-          'digital_loggers_pdu': ['dlipower'],
-          'flash': ['usbinfo'],
-          'mdns': ['psutil', 'zeroconf'],
-          'android': [
-              'Monsoon',
-              'numpy',
-              'paramiko[ed25519]',
-              'pylibftdi',
-              'pyserial',
-              'requests',
-              'scapy',
-              'scipy',
-          ],
-      })
+setup(
+    name="antlion",
+    version="0.2.0",
+    description="Host-driven, hardware-agnostic Fuchsia connectivity tests",
+    license="Apache-2.0",
+    packages=find_packages(
+        where="src",
+    ),
+    package_dir={"": "src"},
+    include_package_data=True,
+    tests_require=[],
+    install_requires=install_requires,
+    extras_require={
+        "html_graphing": ["bokeh"],
+        "digital_loggers_pdu": ["dlipower"],
+        "android": [
+            "numpy",
+            "scapy",
+        ],
+    },
+)
diff --git a/src/antlion/asserts.py b/src/antlion/asserts.py
deleted file mode 100644
index ce0a7b1..0000000
--- a/src/antlion/asserts.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly.asserts import *
-
-
-# Have an instance of unittest.TestCase so we could reuse some logic from
-# python's own unittest.
-# _ProxyTest is required because py2 does not allow instantiating
-# unittest.TestCase directly.
-class _ProxyTest(unittest.TestCase):
-    def runTest(self):
-        pass
-
-
-_pyunit_proxy = _ProxyTest()
-
-
-def assert_almost_equal(first,
-                        second,
-                        places=7,
-                        msg=None,
-                        delta=None,
-                        extras=None):
-    """
-    Assert FIRST to be within +/- DELTA to SECOND, otherwise fail the
-    test.
-    :param first: The first argument, LHS
-    :param second: The second argument, RHS
-    :param places: For floating points, how many decimal places to look into
-    :param msg: Message to display on failure
-    :param delta: The +/- first and second could be apart from each other
-    :param extras: Extra object passed to test failure handler
-    :return:
-    """
-    my_msg = None
-    try:
-        if delta:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, msg=msg, delta=delta)
-        else:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, places=places, msg=msg)
-    except Exception as e:
-        my_msg = str(e)
-        if msg:
-            my_msg = "%s %s" % (my_msg, msg)
-    # This is a hack to remove the stacktrace produced by the above exception.
-    if my_msg is not None:
-        fail(my_msg, extras=extras)
diff --git a/src/antlion/base_test.py b/src/antlion/base_test.py
index 5e39933..5033552 100755
--- a/src/antlion/base_test.py
+++ b/src/antlion/base_test.py
@@ -22,7 +22,6 @@
 import traceback
 from concurrent.futures import ThreadPoolExecutor
 
-from antlion import asserts
 from antlion import error
 from antlion import keys
 from antlion import logger
@@ -39,6 +38,7 @@
 from antlion.event.event import TestClassEndEvent
 from antlion.event.subscription_bundle import SubscriptionBundle
 
+from mobly import asserts
 from mobly.base_test import BaseTestClass as MoblyBaseTest
 from mobly.records import ExceptionRecord
 
@@ -50,26 +50,24 @@
 @subscribe_static(TestCaseBeginEvent)
 def _logcat_log_test_begin(event):
     """Ensures that logcat is running. Write a logcat line indicating test case
-     begin."""
+    begin."""
     test_instance = event.test_class
     try:
-        for ad in getattr(test_instance, 'android_devices', []):
+        for ad in getattr(test_instance, "android_devices", []):
             if not ad.is_adb_logcat_on:
                 ad.start_adb_logcat()
             # Write test start token to adb log if android device is attached.
             if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s BEGIN %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
+                ad.droid.logV("%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
 
     except error.ActsError as e:
         test_instance.results.error.append(
-            ExceptionRecord(e, 'Logcat for test begin: %s' %
-                            event.test_case_name))
-        test_instance.log.error('BaseTest setup_test error: %s' % e.details)
+            ExceptionRecord(e, "Logcat for test begin: %s" % event.test_case_name)
+        )
+        test_instance.log.error("BaseTest setup_test error: %s" % e.details)
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseEndEvent)
@@ -78,20 +76,18 @@
     test_instance = event.test_class
     try:
         # Write test end token to adb log if android device is attached.
-        for ad in getattr(test_instance, 'android_devices', []):
+        for ad in getattr(test_instance, "android_devices", []):
             if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s END %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
+                ad.droid.logV("%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
 
     except error.ActsError as e:
         test_instance.results.error.append(
-            ExceptionRecord(e,
-                            'Logcat for test end: %s' % event.test_case_name))
-        test_instance.log.error('BaseTest teardown_test error: %s' % e.details)
+            ExceptionRecord(e, "Logcat for test end: %s" % event.test_case_name)
+        )
+        test_instance.log.error("BaseTest teardown_test error: %s" % e.details)
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseBeginEvent)
@@ -100,15 +96,15 @@
     Fuchsia device"""
     test_instance = event.test_class
     try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
                 fd.sl4f.logging_lib.logI(
-                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
+                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name)
+                )
 
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 @subscribe_static(TestCaseEndEvent)
@@ -117,15 +113,15 @@
     Fuchsia device"""
     test_instance = event.test_class
     try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
+        for fd in getattr(test_instance, "fuchsia_devices", []):
+            if hasattr(fd, "_sl4f"):
                 fd.sl4f.logging_lib.logI(
-                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
+                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name)
+                )
 
     except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
+        test_instance.log.warning("Unable to send END log command to all devices.")
+        test_instance.log.warning("Error: %s" % e)
 
 
 event_bus.register_subscription(_logcat_log_test_begin.subscription)
@@ -193,7 +189,8 @@
 
         self.consecutive_failures = 0
         self.consecutive_failure_limit = self.user_params.get(
-            'consecutive_failure_limit', -1)
+            "consecutive_failure_limit", -1
+        )
         self.size_limit_reached = False
         self.retryable_exceptions = signals.TestFailure
 
@@ -211,8 +208,7 @@
         for ctrl_name in keys.Config.builtin_controller_names.value:
             if ctrl_name in self.controller_configs:
                 module_name = keys.get_module_name(ctrl_name)
-                module = importlib.import_module("antlion.controllers.%s" %
-                                                 module_name)
+                module = importlib.import_module("antlion.controllers.%s" % module_name)
                 builtin_controllers.append(module)
         return builtin_controllers
 
@@ -224,7 +220,7 @@
         without needing to make changes.
         """
         for key, value in self.user_params.items():
-            if key.endswith('files') and isinstance(value, dict):
+            if key.endswith("files") and isinstance(value, dict):
                 new_user_params = dict(value)
                 new_user_params.update(self.user_params)
                 self.user_params = new_user_params
@@ -243,15 +239,12 @@
         Returns:
             A string corresponding to the module's name.
         """
-        if hasattr(a_module, 'ACTS_CONTROLLER_REFERENCE_NAME'):
+        if hasattr(a_module, "ACTS_CONTROLLER_REFERENCE_NAME"):
             return a_module.ACTS_CONTROLLER_REFERENCE_NAME
         else:
-            return a_module.__name__.split('.')[-1]
+            return a_module.__name__.split(".")[-1]
 
-    def register_controller(self,
-                            controller_module,
-                            required=True,
-                            builtin=False):
+    def register_controller(self, controller_module, required=True, builtin=False):
         """Registers an ACTS controller module for a test class. Invokes Mobly's
         implementation of register_controller.
 
@@ -318,7 +311,8 @@
 
         # Get controller objects from Mobly's register_controller
         controllers = self._controller_manager.register_controller(
-            controller_module, required=required)
+            controller_module, required=required
+        )
         if not controllers:
             return None
 
@@ -326,8 +320,7 @@
         # Implementation of "get_info" is optional for a controller module.
         if hasattr(controller_module, "get_info"):
             controller_info = controller_module.get_info(controllers)
-            self.log.info("Controller %s: %s", module_config_name,
-                          controller_info)
+            self.log.info("Controller %s: %s", module_config_name, controller_info)
 
         if builtin:
             setattr(self, module_ref_name, controllers)
@@ -359,7 +352,7 @@
 
         # Skip the test if the consecutive test case failure limit is reached.
         if self.consecutive_failures == self.consecutive_failure_limit:
-            raise signals.TestError('Consecutive test failure')
+            raise signals.TestError("Consecutive test failure")
 
         return self.setup_test()
 
@@ -379,7 +372,7 @@
         """Proxy function to guarantee the base implementation of teardown_test
         is called.
         """
-        self.log.debug('Tearing down test %s' % test_name)
+        self.log.debug("Tearing down test %s" % test_name)
         self.teardown_test()
 
     def _on_fail(self, record):
@@ -502,8 +495,11 @@
         except signals.TestAbortAll:
             raise
         except Exception as e:
-            self.log.exception("Exception happened when executing %s for %s.",
-                               func.__name__, self.current_test_name)
+            self.log.exception(
+                "Exception happened when executing %s for %s.",
+                func.__name__,
+                self.current_test_name,
+            )
             tr_record.add_error(func.__name__, e)
 
     def exec_one_testcase(self, test_name, test_func):
@@ -527,8 +523,8 @@
         self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
 
         # Enable test retry if specified in the ACTS config
-        retry_tests = self.user_params.get('retry_tests', [])
-        full_test_name = '%s.%s' % (class_name, self.test_name)
+        retry_tests = self.user_params.get("retry_tests", [])
+        full_test_name = "%s.%s" % (class_name, self.test_name)
         if any(name in retry_tests for name in [class_name, full_test_name]):
             test_func = self.get_func_with_retry(test_func)
 
@@ -537,8 +533,9 @@
         try:
             try:
                 ret = self._setup_test(self.test_name)
-                asserts.assert_true(ret is not False,
-                                    "Setup for %s failed." % test_name)
+                asserts.assert_true(
+                    ret is not False, "Setup for %s failed." % test_name
+                )
                 verdict = test_func()
             finally:
                 try:
@@ -551,7 +548,8 @@
         except (signals.TestFailure, AssertionError) as e:
             test_signal = e
             if self.user_params.get(
-                    keys.Config.key_test_failure_tracebacks.value, False):
+                keys.Config.key_test_failure_tracebacks.value, False
+            ):
                 self.log.exception(e)
             tr_record.test_fail(e)
         except signals.TestSkip as e:
@@ -594,11 +592,11 @@
                     self._exec_procedure_func(self._on_fail, tr_record)
             finally:
                 self.results.add_record(tr_record)
-                self.summary_writer.dump(tr_record.to_dict(),
-                                         records.TestSummaryEntryType.RECORD)
+                self.summary_writer.dump(
+                    tr_record.to_dict(), records.TestSummaryEntryType.RECORD
+                )
                 self.current_test_name = None
-                event_bus.post(
-                    TestCaseEndEvent(self, self.test_name, test_signal))
+                event_bus.post(TestCaseEndEvent(self, self.test_name, test_signal))
 
     def get_func_with_retry(self, func, attempts=2):
         """Returns a wrapped test method that re-runs after failure. Return test
@@ -626,23 +624,25 @@
                     return func(*args, **kwargs)
                 except exceptions as e:
                     retry = True
-                    msg = 'Failure on attempt %d: %s' % (i + 1, e.details)
+                    msg = "Failure on attempt %d: %s" % (i + 1, e.details)
                     self.log.warning(msg)
                     error_msgs.append(msg)
                     if e.extras:
-                        extras['Attempt %d' % (i + 1)] = e.extras
-            raise signals.TestFailure('\n'.join(error_msgs), extras)
+                        extras["Attempt %d" % (i + 1)] = e.extras
+            raise signals.TestFailure("\n".join(error_msgs), extras)
 
         return wrapper
 
-    def run_generated_testcases(self,
-                                test_func,
-                                settings,
-                                args=None,
-                                kwargs=None,
-                                tag="",
-                                name_func=None,
-                                format_args=False):
+    def run_generated_testcases(
+        self,
+        test_func,
+        settings,
+        args=None,
+        kwargs=None,
+        tag="",
+        name_func=None,
+        format_args=False,
+    ):
         """Deprecated. Please use pre_run and generate_tests.
 
         Generated test cases are not written down as functions, but as a list
@@ -681,27 +681,31 @@
                 try:
                     test_name = name_func(setting, *args, **kwargs)
                 except:
-                    self.log.exception(("Failed to get test name from "
-                                        "test_func. Fall back to default %s"),
-                                       test_name)
+                    self.log.exception(
+                        (
+                            "Failed to get test name from "
+                            "test_func. Fall back to default %s"
+                        ),
+                        test_name,
+                    )
 
             self.results.requested.append(test_name)
 
             if len(test_name) > utils.MAX_FILENAME_LEN:
-                test_name = test_name[:utils.MAX_FILENAME_LEN]
+                test_name = test_name[: utils.MAX_FILENAME_LEN]
 
             previous_success_cnt = len(self.results.passed)
 
             if format_args:
                 self.exec_one_testcase(
                     test_name,
-                    functools.partial(test_func, *(args + (setting, )),
-                                      **kwargs))
+                    functools.partial(test_func, *(args + (setting,)), **kwargs),
+                )
             else:
                 self.exec_one_testcase(
                     test_name,
-                    functools.partial(test_func, *((setting, ) + args),
-                                      **kwargs))
+                    functools.partial(test_func, *((setting,) + args), **kwargs),
+                )
 
             if len(self.results.passed) - previous_success_cnt != 1:
                 failed_settings.append(setting)
@@ -727,11 +731,12 @@
         except signals.TestAbortAll:
             raise
         except:
-            self.log.exception("Exception happened when executing %s in %s.",
-                               func.__name__, self.TAG)
+            self.log.exception(
+                "Exception happened when executing %s in %s.", func.__name__, self.TAG
+            )
             return False
 
-    def _block_all_test_cases(self, tests, reason='Failed class setup'):
+    def _block_all_test_cases(self, tests, reason="Failed class setup"):
         """
         Block all passed in test cases.
         Args:
@@ -743,12 +748,13 @@
             signal = signals.TestError(reason)
             record = records.TestResultRecord(test_name, self.TAG)
             record.test_begin()
-            if hasattr(test_func, 'gather'):
+            if hasattr(test_func, "gather"):
                 signal.extras = test_func.gather()
             record.test_error(signal)
             self.results.add_record(record)
-            self.summary_writer.dump(record.to_dict(),
-                                     records.TestSummaryEntryType.RECORD)
+            self.summary_writer.dump(
+                record.to_dict(), records.TestSummaryEntryType.RECORD
+            )
             self._on_skip(record)
 
     def run(self, test_names=None):
@@ -788,14 +794,18 @@
             matches = []
             for test_name in test_names:
                 for valid_test in valid_tests:
-                    if (fnmatch.fnmatch(valid_test, test_name)
-                            and valid_test not in matches):
+                    if (
+                        fnmatch.fnmatch(valid_test, test_name)
+                        and valid_test not in matches
+                    ):
                         matches.append(valid_test)
         else:
             matches = valid_tests
         self.results.requested = matches
-        self.summary_writer.dump(self.results.requested_test_names_dict(),
-                                 records.TestSummaryEntryType.TEST_NAME_LIST)
+        self.summary_writer.dump(
+            self.results.requested_test_names_dict(),
+            records.TestSummaryEntryType.TEST_NAME_LIST,
+        )
         tests = self._get_test_methods(matches)
 
         # Setup for the class.
@@ -806,7 +816,7 @@
                 self._block_all_test_cases(tests)
                 setup_fail = True
         except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
+            self.log.exception("Test class %s aborted" % self.TAG)
             setup_fail = True
         except Exception as e:
             self.log.exception("Failed to setup %s.", self.TAG)
@@ -814,17 +824,21 @@
             setup_fail = True
         if setup_fail:
             self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
             return self.results
 
         # Run tests in order.
         test_case_iterations = self.user_params.get(
-            keys.Config.key_test_case_iterations.value, 1)
-        if any([
+            keys.Config.key_test_case_iterations.value, 1
+        )
+        if any(
+            [
                 substr in self.__class__.__name__
-                for substr in ['Preflight', 'Postflight']
-        ]):
+                for substr in ["Preflight", "Postflight"]
+            ]
+        ):
             test_case_iterations = 1
         try:
             for test_name, test_func in tests:
@@ -832,7 +846,7 @@
                     self.exec_one_testcase(test_name, test_func)
             return self.results
         except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
+            self.log.exception("Test class %s aborted" % self.TAG)
             return self.results
         except signals.TestAbortAll as e:
             # Piggy-back test results on this exception object so we don't lose
@@ -841,8 +855,9 @@
             raise e
         finally:
             self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
+            self.log.info(
+                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
+            )
 
     def _ad_take_bugreport(self, ad, test_name, begin_time):
         for i in range(3):
@@ -863,15 +878,17 @@
             try:
                 ad.get_qxdm_logs(test_name, qxdm_begin_time)
             except Exception as e:
-                ad.log.error("Failed to get QXDM log for %s with error %s",
-                             test_name, e)
+                ad.log.error(
+                    "Failed to get QXDM log for %s with error %s", test_name, e
+                )
                 result = False
 
         try:
             ad.check_crash_report(test_name, begin_time, log_crash_report=True)
         except Exception as e:
-            ad.log.error("Failed to check crash report for %s with error %s",
-                         test_name, e)
+            ad.log.error(
+                "Failed to check crash report for %s with error %s", test_name, e
+            )
             result = False
         return result
 
@@ -890,15 +907,13 @@
         # problematic tests, we skip bugreport and other failure artifact
         # creation.
         class_name = self.__class__.__name__
-        quiet_tests = self.user_params.get('quiet_tests', [])
+        quiet_tests = self.user_params.get("quiet_tests", [])
         if class_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test class.")
+            self.log.info("Skipping bug report, as directed for this test class.")
             return True
-        full_test_name = '%s.%s' % (class_name, test_name)
+        full_test_name = "%s.%s" % (class_name, test_name)
         if full_test_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test case.")
+            self.log.info("Skipping bug report, as directed for this test case.")
             return True
 
         # Once we hit a certain log path size, it's not going to get smaller.
@@ -907,7 +922,8 @@
             return True
         try:
             max_log_size = int(
-                self.user_params.get("soft_output_size_limit") or "invalid")
+                self.user_params.get("soft_output_size_limit") or "invalid"
+            )
             log_path = getattr(logging, "log_path", None)
             if log_path:
                 curr_log_size = utils.get_directory_size(log_path)
@@ -926,10 +942,9 @@
             return
 
         executor = ThreadPoolExecutor(max_workers=10)
-        for ad in getattr(self, 'android_devices', []):
+        for ad in getattr(self, "android_devices", []):
             executor.submit(self._ad_take_bugreport, ad, test_name, begin_time)
-            executor.submit(self._ad_take_extra_logs, ad, test_name,
-                            begin_time)
+            executor.submit(self._ad_take_extra_logs, ad, test_name, begin_time)
         executor.shutdown()
 
     def _reboot_device(self, ad):
@@ -937,24 +952,23 @@
         ad = ad.reboot()
 
     def _cleanup_logger_sessions(self):
-        for (mylogger, session) in self.logger_sessions:
-            self.log.info("Resetting a diagnostic session %s, %s", mylogger,
-                          session)
+        for mylogger, session in self.logger_sessions:
+            self.log.info("Resetting a diagnostic session %s, %s", mylogger, session)
             mylogger.reset()
         self.logger_sessions = []
 
     def _pull_diag_logs(self, test_name, begin_time):
-        for (mylogger, session) in self.logger_sessions:
+        for mylogger, session in self.logger_sessions:
             self.log.info("Pulling diagnostic session %s", mylogger)
             mylogger.stop(session)
             diag_path = os.path.join(
-                self.log_path, logger.epoch_to_log_line_timestamp(begin_time))
+                self.log_path, logger.epoch_to_log_line_timestamp(begin_time)
+            )
             os.makedirs(diag_path, exist_ok=True)
             mylogger.pull(session, diag_path)
 
     def register_test_class_event_subscriptions(self):
-        self.class_subscriptions = subscription_bundle.create_from_instance(
-            self)
+        self.class_subscriptions = subscription_bundle.create_from_instance(self)
         self.class_subscriptions.register()
 
     def unregister_test_class_event_subscriptions(self):
diff --git a/src/antlion/bin/act.py b/src/antlion/bin/act.py
index 81d0452..2f78645 100755
--- a/src/antlion/bin/act.py
+++ b/src/antlion/bin/act.py
@@ -54,8 +54,7 @@
     except signals.TestAbortAll:
         return True
     except:
-        print("Exception when executing %s, iteration %s." %
-              (runner.testbed_name, i))
+        print("Exception when executing %s, iteration %s." % (runner.testbed_name, i))
         print(traceback.format_exc())
     finally:
         runner.stop()
@@ -109,8 +108,10 @@
             ret = _run_test(c, test_identifiers, repeat)
             ok = ok and ret
         except Exception as e:
-            print("Exception occurred when executing test bed %s. %s" %
-                  (c.testbed_name, e))
+            print(
+                "Exception occurred when executing test bed %s. %s"
+                % (c.testbed_name, e)
+            )
     return ok
 
 
@@ -122,63 +123,80 @@
     functions and acts.test_runner.execute_one_test_class.
     """
     parser = argparse.ArgumentParser(
-        description=("Specify tests to run. If nothing specified, "
-                     "run all test cases found."))
-    parser.add_argument('-c',
-                        '--config',
-                        type=str,
-                        required=True,
-                        metavar="<PATH>",
-                        help="Path to the test configuration file.")
+        description=(
+            "Specify tests to run. If nothing specified, " "run all test cases found."
+        )
+    )
     parser.add_argument(
-        '-ci',
-        '--campaign_iterations',
+        "-c",
+        "--config",
+        type=str,
+        required=True,
+        metavar="<PATH>",
+        help="Path to the test configuration file.",
+    )
+    parser.add_argument(
+        "-ci",
+        "--campaign_iterations",
         metavar="<CAMPAIGN_ITERATIONS>",
-        nargs='?',
+        nargs="?",
         type=int,
         const=1,
         default=1,
-        help="Number of times to run the campaign or a group of test cases.")
-    parser.add_argument('-tb',
-                        '--testbed',
-                        nargs='+',
-                        type=str,
-                        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
-                        help="Specify which test beds to run tests on.")
-    parser.add_argument('-lp',
-                        '--logpath',
-                        type=str,
-                        metavar="<PATH>",
-                        help="Root path under which all logs will be placed.")
+        help="Number of times to run the campaign or a group of test cases.",
+    )
     parser.add_argument(
-        '-tp',
-        '--testpaths',
-        nargs='*',
+        "-tb",
+        "--testbed",
+        nargs="+",
+        type=str,
+        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
+        help="Specify which test beds to run tests on.",
+    )
+    parser.add_argument(
+        "-lp",
+        "--logpath",
+        type=str,
+        metavar="<PATH>",
+        help="Root path under which all logs will be placed.",
+    )
+    parser.add_argument(
+        "-tp",
+        "--testpaths",
+        nargs="*",
         type=str,
         metavar="<PATH> <PATH>",
-        help="One or more non-recursive test class search paths.")
+        help="One or more non-recursive test class search paths.",
+    )
 
     group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-tc',
-                       '--testclass',
-                       nargs='+',
-                       type=str,
-                       metavar="[TestClass1 TestClass2:test_xxx ...]",
-                       help="A list of test classes/cases to run.")
     group.add_argument(
-        '-tf',
-        '--testfile',
+        "-tc",
+        "--testclass",
+        nargs="+",
+        type=str,
+        metavar="[TestClass1 TestClass2:test_xxx ...]",
+        help="A list of test classes/cases to run.",
+    )
+    group.add_argument(
+        "-tf",
+        "--testfile",
         nargs=1,
         type=str,
         metavar="<PATH>",
-        help=("Path to a file containing a comma delimited list of test "
-              "classes to run."))
-    parser.add_argument('-ti',
-                        '--test_case_iterations',
-                        metavar="<TEST_CASE_ITERATIONS>",
-                        nargs='?',
-                        type=int,
-                        help="Number of times to run every test case.")
+        help=(
+            "Path to a file containing a comma delimited list of test "
+            "classes to run."
+        ),
+    )
+    parser.add_argument(
+        "-ti",
+        "--test_case_iterations",
+        metavar="<TEST_CASE_ITERATIONS>",
+        nargs="?",
+        type=int,
+        help="Number of times to run every test case.",
+    )
 
     args = parser.parse_args(sys.argv[1:])
     test_list = None
@@ -186,12 +204,18 @@
         test_list = config_parser.parse_test_file(args.testfile[0])
     elif args.testclass:
         test_list = args.testclass
-    if re.search(r'\.ya?ml$', args.config):
-        parsed_configs = mobly_config_parser.load_test_config_file(
-            args.config, args.testbed)
-    else:
-        parsed_configs = config_parser.load_test_config_file(
-            args.config, args.testbed)
+
+    config = args.config
+
+    if config.endswith(".json"):
+        print(
+            "DEPRECATION NOTICE: Converting ACTS JSON to Mobly YAML. ACTS is "
+            + "deprecated. Support will be removed in the next release."
+        )
+        config = utils.acts_json_to_mobly_yaml(config)
+        print(f"Wrote YAML config to {config}")
+
+    parsed_configs = mobly_config_parser.load_test_config_file(config, args.testbed)
 
     for test_run_config in parsed_configs:
         if args.testpaths:
@@ -205,10 +229,12 @@
 
         # Sets the --testpaths flag to the default test directory if left unset.
         testpath_key = keys.Config.key_test_paths.value
-        if (testpath_key not in test_run_config.controller_configs
-                or test_run_config.controller_configs[testpath_key] is None):
+        if (
+            testpath_key not in test_run_config.controller_configs
+            or test_run_config.controller_configs[testpath_key] is None
+        ):
             test_run_config.controller_configs[testpath_key] = [
-                os.path.join(os.path.dirname(__file__), '../tests/'),
+                os.path.join(os.path.dirname(__file__), "../tests/"),
             ]
 
         for path in test_run_config.controller_configs[testpath_key]:
@@ -217,15 +243,25 @@
         # TODO(markdr): Find a way to merge this with the validation done in
         # Mobly's load_test_config_file.
         if not test_run_config.log_path:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  keys.Config.key_log_path.value)
+            raise ActsConfigError(
+                "Required key %s missing in test config."
+                % keys.Config.key_log_path.value
+            )
         test_run_config.log_path = utils.abs_path(test_run_config.log_path)
 
     # Prepare args for test runs
     test_identifiers = config_parser.parse_test_list(test_list)
 
-    exec_result = _run_tests(parsed_configs, test_identifiers,
-                             args.campaign_iterations)
+    print(
+        "\n\nDEPRECATION NOTICE: Running antlion tests with act.py is "
+        "deprecated and will be removed in the next release. Please migrate "
+        "by using Mobly YAML configs and executing the test class directly:\n\n"
+    )
+    for test_class, _ in test_identifiers:
+        print(f"   python {test_class}.py -c {config}")
+    print("\n")
+
+    exec_result = _run_tests(parsed_configs, test_identifiers, args.campaign_iterations)
     if exec_result is False:
         # return 1 upon test failure.
         sys.exit(1)
diff --git a/src/antlion/libs/utils/__init__.py b/src/antlion/capabilities/__init__.py
similarity index 100%
rename from src/antlion/libs/utils/__init__.py
rename to src/antlion/capabilities/__init__.py
diff --git a/src/antlion/capabilities/ssh.py b/src/antlion/capabilities/ssh.py
new file mode 100644
index 0000000..eeb1e16
--- /dev/null
+++ b/src/antlion/capabilities/ssh.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import time
+
+from dataclasses import dataclass
+from typing import List, Union, BinaryIO
+
+from antlion import logger
+from antlion import signals
+from antlion.net import wait_for_port
+
+DEFAULT_SSH_PORT: int = 22
+DEFAULT_SSH_TIMEOUT_SEC: int = 60
+DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
+DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
+
+
+class SSHResult:
+    """Result of an SSH command."""
+
+    def __init__(
+        self, process: Union[subprocess.CompletedProcess, subprocess.CalledProcessError]
+    ) -> None:
+        self._raw_stdout = process.stdout
+        self._stderr = process.stderr.decode("utf-8", errors="replace")
+        self._exit_status: int = process.returncode
+
+    def __str__(self):
+        if self.exit_status == 0:
+            return self.stdout
+        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
+
+    @property
+    def stdout(self) -> str:
+        if not hasattr(self, "_stdout"):
+            self._stdout = self._raw_stdout.decode("utf-8", errors="replace")
+        return self._stdout
+
+    @property
+    def stderr(self) -> str:
+        return self._stderr
+
+    @property
+    def exit_status(self) -> int:
+        return self._exit_status
+
+    @property
+    def raw_stdout(self) -> bytes:
+        return self._raw_stdout
+
+
+class SSHError(signals.TestError):
+    """A SSH command returned with a non-zero status code."""
+
+    def __init__(self, command: str, result: SSHResult):
+        super().__init__(f'SSH command "{command}" unexpectedly returned {result}')
+        self.result = result
+
+
+class SSHTimeout(signals.TestError):
+    """A SSH command timed out."""
+
+    def __init__(self, err: subprocess.TimeoutExpired):
+        super().__init__(
+            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
+            f'stdout="{err.stdout}", stderr="{err.stderr}"'
+        )
+
+
+class SSHTransportError(signals.TestError):
+    """Failure to send an SSH command."""
+
+
+@dataclass
+class SSHConfig:
+    """SSH client config."""
+
+    # SSH flags. See ssh(1) for full details.
+    user: str
+    host_name: str
+    identity_file: str
+
+    ssh_binary: str = "ssh"
+    config_file: str = "/dev/null"
+    port: int = 22
+
+    # SSH options. See ssh_config(5) for full details.
+    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
+    strict_host_key_checking: bool = False
+    user_known_hosts_file: str = "/dev/null"
+    log_level: str = "ERROR"
+
+    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
+        """Generate the complete command to execute command over SSH.
+
+        Args:
+            command: The command to run over SSH
+            force_tty: Force pseudo-terminal allocation. This can be used to
+                execute arbitrary screen-based programs on a remote machine,
+                which can be very useful, e.g. when implementing menu services.
+
+        Returns:
+            Arguments composing the complete call to SSH.
+        """
+        optional_flags = []
+        if force_tty:
+            # Multiple -t options force tty allocation, even if ssh has no local
+            # tty. This is necessary for launching ssh with subprocess without
+            # shell=True.
+            optional_flags.append("-tt")
+
+        return (
+            [
+                self.ssh_binary,
+                # SSH flags
+                "-i",
+                self.identity_file,
+                "-F",
+                self.config_file,
+                "-p",
+                str(self.port),
+                # SSH configuration options
+                "-o",
+                f"ConnectTimeout={self.connect_timeout}",
+                "-o",
+                f"ServerAliveInterval={self.server_alive_interval}",
+                "-o",
+                f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
+                "-o",
+                f"UserKnownHostsFile={self.user_known_hosts_file}",
+                "-o",
+                f"LogLevel={self.log_level}",
+            ]
+            + optional_flags
+            + [f"{self.user}@{self.host_name}"]
+            + command.split()
+        )
+
+
+class SSHProvider:
+    """Device-specific provider for SSH clients."""
+
+    def __init__(self, config: SSHConfig) -> None:
+        """
+        Args:
+            config: SSH client config
+        """
+        logger_tag = f"ssh | {config.host_name}"
+        if config.port != DEFAULT_SSH_PORT:
+            logger_tag += f":{config.port}"
+
+        # TODO: Check that the private key at identity_file exists.
+
+        self.log = logger.create_tagged_trace_logger(logger_tag)
+        self.config = config
+
+        try:
+            self.wait_until_reachable()
+            self.log.info("sshd is reachable")
+        except Exception as e:
+            raise TimeoutError("sshd is unreachable") from e
+
+    def wait_until_reachable(self) -> None:
+        """Wait for the device to become reachable via SSH.
+
+        Raises:
+            TimeoutError: connect_timeout has expired without a successful SSH
+                connection to the device
+            SSHTransportError: SSH is available on the device but
+                connect_timeout has expired and SSH fails to run
+            SSHTimeout: SSH is available on the device but connect_timeout has
+                expired and SSH takes too long to run a command
+        """
+        timeout_sec = self.config.connect_timeout
+        timeout = time.time() + timeout_sec
+        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)
+
+        while True:
+            try:
+                self._run("echo", timeout_sec, False, None)
+                return
+            except SSHTransportError as e:
+                # Repeat if necessary; _run() can exit prematurely by receiving
+                # SSH transport errors. These errors can be caused by sshd not
+                # being fully initialized yet.
+                if time.time() < timeout:
+                    continue
+                else:
+                    raise e
+
+    def wait_until_unreachable(
+        self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
+    ) -> None:
+        """Wait for the device to become unreachable via SSH.
+
+        Args:
+            interval_sec: Seconds to wait between reachability checks
+            timeout_sec: Seconds to wait until raising TimeoutError
+
+        Raises:
+            TimeoutError: when timeout_sec has expired and the device is still
+                reachable via SSH
+        """
+        timeout = time.time() + timeout_sec
+
+        while True:
+            try:
+                wait_for_port(
+                    self.config.host_name, self.config.port, timeout_sec=interval_sec
+                )
+            except TimeoutError:
+                return
+
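+            # The port is still accepting connections; poll again until the
+            # overall deadline passes, then give up.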
+            if time.time() > timeout:
+                raise TimeoutError(
+                    f"Connection to {self.config.host_name} is still reachable "
+                    f"after {timeout_sec}s"
+                )
+
+    def run(
+        self,
+        command: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+        force_tty: bool = False,
+    ) -> SSHResult:
+        """Run a command on the device then exit.
+
+        Args:
+            command: String to send to the device.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Number of times to retry connecting on failure.
+            force_tty: Force pseudo-terminal allocation.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+
+        Returns:
+            SSHResult from the executed command.
+        """
+        return self._run_with_retry(
+            command, timeout_sec, connect_retries, force_tty, stdin=None
+        )
+
+    def _run_with_retry(
+        self,
+        command: str,
+        timeout_sec: int,
+        connect_retries: int,
+        force_tty: bool,
+        stdin: BinaryIO,
+    ) -> SSHResult:
+        err: Exception = ValueError("connect_retries cannot be 0")
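+        # Holds the last transport error seen; if connect_retries is 0 the loop
+        # below never runs and this placeholder ValueError is raised instead.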
+        for i in range(0, connect_retries):
+            try:
+                return self._run(command, timeout_sec, force_tty, stdin)
+            except SSHTransportError as e:
+                err = e
+                self.log.warning(f"Connect failed: {e}")
+        raise err
+
+    def _run(
+        self, command: str, timeout_sec: int, force_tty: bool, stdin: BinaryIO
+    ) -> SSHResult:
+        full_command = self.config.full_command(command, force_tty)
+        self.log.debug(
+            f'Running "{command}" (full command: "{" ".join(full_command)}")'
+        )
+        try:
+            process = subprocess.run(
+                full_command,
+                capture_output=True,
+                timeout=timeout_sec,
+                check=True,
+                stdin=stdin,
+            )
+        except subprocess.CalledProcessError as e:
+            if e.returncode == 255:
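+                # ssh(1) exits with status 255 when ssh itself fails (e.g. a
+                # transport error); other non-zero statuses come from the
+                # remote command.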
+                stderr = e.stderr.decode("utf-8", errors="replace")
+                if (
+                    "Name or service not known" in stderr
+                    or "Host does not exist" in stderr
+                ):
+                    raise SSHTransportError(
+                        f"Hostname {self.config.host_name} cannot be resolved to an address"
+                    ) from e
+                if "Connection timed out" in stderr:
+                    raise SSHTransportError(
+                        f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
+                    ) from e
+                if "Connection refused" in stderr:
+                    raise SSHTransportError(
+                        f"Connection refused by {self.config.host_name}"
+                    ) from e
+
+            raise SSHError(command, SSHResult(e)) from e
+        except subprocess.TimeoutExpired as e:
+            raise SSHTimeout(e) from e
+
+        return SSHResult(process)
+
+    def upload_file(
+        self,
+        local_path: str,
+        remote_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            local_path: Path to the file to upload
+            remote_path: Path on the remote device to place the uploaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Number of times to retry connecting on failure.
+
+        Raises:
+            SSHError: if the SSH upload returns a non-zero status code
+            SSHTransportError: if SSH fails to run the upload command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        with open(local_path, "rb") as file:
+            self._run_with_retry(
+                f"cat > {remote_path}",
+                timeout_sec,
+                connect_retries,
+                force_tty=False,
+                stdin=file,
+            )
+
+    def download_file(
+        self,
+        remote_path: str,
+        local_path: str,
+        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
+        connect_retries: int = 3,
+    ) -> None:
+        """Upload a file to the device.
+
+        Args:
+            remote_path: Path on the remote device to download.
+            local_path: Path on the host to the place the downloaded file.
+            timeout_sec: Seconds to wait for the command to complete.
+            connect_retries: Amount of times to retry connect on fail.
+
+        Raises:
+            SSHError: if the SSH command returns a non-zero status code
+            SSHTransportError: if SSH fails to run the command
+            SSHTimeout: if there is no response within timeout_sec
+        """
+        file = open(local_path, "rb")
+        return self._run_with_retry(
+            f"cat > {remote_path}",
+            timeout_sec,
+            connect_retries,
+            force_tty=False,
+            stdin=file,
+        )
diff --git a/src/antlion/config_parser.py b/src/antlion/config_parser.py
index 0cfb308..7f202ff 100755
--- a/src/antlion/config_parser.py
+++ b/src/antlion/config_parser.py
@@ -23,12 +23,12 @@
 from antlion import utils
 
 # An environment variable defining the base location for ACTS logs.
-_ENV_ACTS_LOGPATH = 'ACTS_LOGPATH'
+_ENV_ACTS_LOGPATH = "ACTS_LOGPATH"
 # An environment variable that enables test case failures to log stack traces.
-_ENV_TEST_FAILURE_TRACEBACKS = 'ACTS_TEST_FAILURE_TRACEBACKS'
+_ENV_TEST_FAILURE_TRACEBACKS = "ACTS_TEST_FAILURE_TRACEBACKS"
 # An environment variable defining the test search paths for ACTS.
-_ENV_ACTS_TESTPATHS = 'ACTS_TESTPATHS'
-_PATH_SEPARATOR = ':'
+_ENV_ACTS_TESTPATHS = "ACTS_TESTPATHS"
+_PATH_SEPARATOR = ":"
 
 
 class ActsConfigError(Exception):
@@ -43,13 +43,11 @@
     for k in keys.Config.reserved_keys.value:
         # TODO(markdr): Remove this continue after merging this with the
         # validation done in Mobly's load_test_config_file.
-        if (k == keys.Config.key_test_paths.value
-                or k == keys.Config.key_log_path.value):
+        if k == keys.Config.key_test_paths.value or k == keys.Config.key_log_path.value:
             continue
 
         if k not in test_config:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  k)
+            raise ActsConfigError("Required key %s missing in test config." % k)
 
 
 def _validate_testbed_name(name):
@@ -70,58 +68,26 @@
         raise ActsConfigError("Test bed names have to be string.")
     for l in name:
         if l not in utils.valid_filename_chars:
-            raise ActsConfigError(
-                "Char '%s' is not allowed in test bed names." % l)
+            raise ActsConfigError("Char '%s' is not allowed in test bed names." % l)
 
 
-def _update_file_paths(config, config_path):
-    """ Checks if the path entries are valid.
-
-    If the file path is invalid, assume it is a relative path and append
-    that to the config file path.
-
-    Args:
-        config : the config object to verify.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
-
-    Raises:
-        If the file path is invalid, ActsConfigError is raised.
-    """
-    # Check the file_path_keys and update if it is a relative path.
-    for file_path_key in keys.Config.file_path_keys.value:
-        if file_path_key in config:
-            config_file = config[file_path_key]
-            if type(config_file) is str:
-                if not os.path.isfile(config_file):
-                    config_file = os.path.join(config_path, config_file)
-                if not os.path.isfile(config_file):
-                    raise ActsConfigError(
-                        "Unable to load config %s from test "
-                        "config file.", config_file)
-                config[file_path_key] = config_file
-
-
-def _validate_testbed_configs(testbed_configs, config_path):
+def _validate_testbed_configs(testbed_configs):
     """Validates the testbed configurations.
 
     Args:
         testbed_configs: A list of testbed configuration json objects.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
 
     Raises:
         If any part of the configuration is invalid, ActsConfigError is raised.
     """
     # Cross checks testbed configs for resource conflicts.
-    for name, config in testbed_configs.items():
-        _update_file_paths(config, config_path)
+    for name in testbed_configs:
         _validate_testbed_name(name)
 
 
 def gen_term_signal_handler(test_runners):
     def termination_sig_handler(signal_num, frame):
-        print('Received sigterm %s.' % signal_num)
+        print("Received sigterm %s." % signal_num)
         for t in test_runners:
             t.stop()
         sys.exit(1)
@@ -141,7 +107,7 @@
         name, the list of strings is a list of test case names. The list can be
         None.
     """
-    tokens = item.split(':')
+    tokens = item.split(":")
     if len(tokens) > 2:
         raise ActsConfigError("Syntax error in test specifier %s" % item)
     if len(tokens) == 1:
@@ -152,7 +118,7 @@
         # This should be considered a test class name followed by
         # a list of test case names.
         test_cls_name, test_case_names = tokens
-        clean_names = [elem.strip() for elem in test_case_names.split(',')]
+        clean_names = [elem.strip() for elem in test_case_names.split(",")]
         return test_cls_name, clean_names
 
 
@@ -206,44 +172,49 @@
             else:
                 raise ActsConfigError(
                     'Expected testbed named "%s", but none was found. Check '
-                    'if you have the correct testbed names.' % name)
+                    "if you have the correct testbed names." % name
+                )
         testbeds = tbs
 
-    if (keys.Config.key_log_path.value not in configs
-            and _ENV_ACTS_LOGPATH in os.environ):
-        print('Using environment log path: %s' %
-              (os.environ[_ENV_ACTS_LOGPATH]))
+    if (
+        keys.Config.key_log_path.value not in configs
+        and _ENV_ACTS_LOGPATH in os.environ
+    ):
+        print("Using environment log path: %s" % (os.environ[_ENV_ACTS_LOGPATH]))
         configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH]
-    if (keys.Config.key_test_paths.value not in configs
-            and _ENV_ACTS_TESTPATHS in os.environ):
-        print('Using environment test paths: %s' %
-              (os.environ[_ENV_ACTS_TESTPATHS]))
-        configs[keys.Config.key_test_paths.
-                value] = os.environ[_ENV_ACTS_TESTPATHS].split(_PATH_SEPARATOR)
-    if (keys.Config.key_test_failure_tracebacks not in configs
-            and _ENV_TEST_FAILURE_TRACEBACKS in os.environ):
-        configs[keys.Config.key_test_failure_tracebacks.
-                value] = os.environ[_ENV_TEST_FAILURE_TRACEBACKS]
+    if (
+        keys.Config.key_test_paths.value not in configs
+        and _ENV_ACTS_TESTPATHS in os.environ
+    ):
+        print("Using environment test paths: %s" % (os.environ[_ENV_ACTS_TESTPATHS]))
+        configs[keys.Config.key_test_paths.value] = os.environ[
+            _ENV_ACTS_TESTPATHS
+        ].split(_PATH_SEPARATOR)
+    if (
+        keys.Config.key_test_failure_tracebacks not in configs
+        and _ENV_TEST_FAILURE_TRACEBACKS in os.environ
+    ):
+        configs[keys.Config.key_test_failure_tracebacks.value] = os.environ[
+            _ENV_TEST_FAILURE_TRACEBACKS
+        ]
 
     # TODO: See if there is a better way to do this: b/29836695
     config_path, _ = os.path.split(utils.abs_path(test_config_path))
     configs[keys.Config.key_config_path.value] = config_path
     _validate_test_config(configs)
-    _validate_testbed_configs(testbeds, config_path)
+    _validate_testbed_configs(testbeds)
     # Unpack testbeds into separate json objects.
     configs.pop(keys.Config.key_testbed.value)
     test_run_configs = []
 
     for _, testbed in testbeds.items():
         test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = testbed[
-            keys.Config.key_testbed_name.value]
+        test_run_config.testbed_name = testbed[keys.Config.key_testbed_name.value]
         test_run_config.controller_configs = testbed
         test_run_config.controller_configs[
-            keys.Config.key_test_paths.value] = configs.get(
-                keys.Config.key_test_paths.value, None)
-        test_run_config.log_path = configs.get(keys.Config.key_log_path.value,
-                                               None)
+            keys.Config.key_test_paths.value
+        ] = configs.get(keys.Config.key_test_paths.value, None)
+        test_run_config.log_path = configs.get(keys.Config.key_log_path.value, None)
         if test_run_config.log_path is not None:
             test_run_config.log_path = utils.abs_path(test_run_config.log_path)
 
@@ -266,13 +237,13 @@
     Returns:
         A list of strings, each is a test specifier.
     """
-    with open(fpath, 'r') as f:
+    with open(fpath, "r") as f:
         tf = []
         for line in f:
             line = line.strip()
             if not line:
                 continue
-            if len(tf) and (tf[-1].endswith(':') or tf[-1].endswith(',')):
+            if len(tf) and (tf[-1].endswith(":") or tf[-1].endswith(",")):
                 tf[-1] += line
             else:
                 tf.append(line)
diff --git a/src/antlion/context.py b/src/antlion/context.py
index 5fe1417..cfe9df8 100644
--- a/src/antlion/context.py
+++ b/src/antlion/context.py
@@ -48,7 +48,7 @@
     """
     if depth is None:
         return _contexts[-1]
-    return _contexts[min(depth, len(_contexts)-1)]
+    return _contexts[min(depth, len(_contexts) - 1)]
 
 
 def get_context_for_event(event):
@@ -67,7 +67,7 @@
         return _get_context_for_test_case_event(event)
     if isinstance(event, TestClassEvent):
         return _get_context_for_test_class_event(event)
-    raise TypeError('Unrecognized event type: %s %s', event, event.__class__)
+    raise TypeError("Unrecognized event type: %s %s", event, event.__class__)
 
 
 def _get_context_for_test_case_event(event):
@@ -180,7 +180,7 @@
         Args:
             log_name: The name of the logger.
             base_output_path: The base path of output files for this logger.
-            """
+        """
         cls._base_output_paths[log_name] = base_output_path
 
     def get_subcontext(self, log_name=None):
@@ -198,7 +198,7 @@
         Returns:
             The output path.
         """
-        return self._subcontexts.get(log_name, '')
+        return self._subcontexts.get(log_name, "")
 
     @classmethod
     def add_subcontext(cls, log_name, subcontext):
@@ -225,9 +225,11 @@
             The output path.
         """
 
-        path = os.path.join(self.get_base_output_path(log_name),
-                            self._get_default_context_dir(),
-                            self.get_subcontext(log_name))
+        path = os.path.join(
+            self.get_base_output_path(log_name),
+            self._get_default_context_dir(),
+            self.get_subcontext(log_name),
+        )
         os.makedirs(path, exist_ok=True)
         return path
 
@@ -251,8 +253,9 @@
             return logging.log_path
         except AttributeError as e:
             raise EnvironmentError(
-                'The ACTS logger has not been set up and'
-                ' "base_output_path" has not been set.') from e
+                "The ACTS logger has not been set up and"
+                ' "base_output_path" has not been set.'
+            ) from e
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context."""
@@ -264,14 +267,14 @@
 
     @property
     def identifier(self):
-        return 'root'
+        return "root"
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
 
         Logs at the root level context are placed directly in the base level
         directory, so no context-level path exists."""
-        return ''
+        return ""
 
 
 class TestClassContext(TestContext):
@@ -336,7 +339,7 @@
 
     @property
     def identifier(self):
-        return '%s.%s' % (self.test_class_name, self.test_case_name)
+        return "%s.%s" % (self.test_class_name, self.test_case_name)
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
@@ -345,9 +348,7 @@
         by the name of the test case. This is in line with the ACTS logger
         itself.
         """
-        return os.path.join(
-            self.test_class_name,
-            self.test_case_name)
+        return os.path.join(self.test_class_name, self.test_case_name)
 
 
 # stack for keeping track of the current test context
diff --git a/src/antlion/controllers/__init__.py b/src/antlion/controllers/__init__.py
index 640393c..e69de29 100644
--- a/src/antlion/controllers/__init__.py
+++ b/src/antlion/controllers/__init__.py
@@ -1,31 +0,0 @@
-"""Modules under antlion.controllers provide interfaces to hardware/software
-resources that ACTS manages.
-
-Top level controllers module are controller modules that need to be explicitly
-specified by users in test configuration files. Top level controller modules
-should have the following module level functions:
-
-def create(configs, logger):
-    '''Instantiates the controller class with the input configs.
-    Args:
-        configs: A list of dicts each representing config for one controller
-            object.
-        logger: The main logger used in the current test run.
-    Returns:
-        A list of controller objects.
-
-def destroy(objs):
-    '''Destroys a list of controller objects created by the "create" function
-    and releases all the resources.
-
-    Args:
-        objs: A list of controller objects created from this module.
-    '''
-"""
-"""This is a list of all the top level controller modules"""
-__all__ = [
-    "android_device", "attenuator", "bluetooth_pts_device", "monsoon",
-    "access_point", "iperf_server", "packet_sender", "arduino_wifi_dongle",
-    "packet_capture", "fuchsia_device", "pdu", "openwrt_ap", "tigertail",
-    "asus_axe11000_ap"
-]
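
The module docstring removed above was the only written-down description of the
top-level controller contract (a module-level create() that builds controller
objects from config dicts and a destroy() that releases them); the access_point
module below still follows it. A hedged sketch of that contract, using a
hypothetical controller name purely for illustration:

    # my_widget.py -- hypothetical top-level controller module (illustration only)
    MOBLY_CONTROLLER_CONFIG_NAME = "MyWidget"

    class MyWidget:
        def __init__(self, config: dict) -> None:
            self.config = config

        def close(self) -> None:
            """Release any resources held by this controller."""

    def create(configs: list) -> list:
        """Instantiate one controller object per config dict."""
        return [MyWidget(c) for c in configs]

    def destroy(objs: list) -> None:
        """Tear down controller objects returned by create()."""
        for obj in objs:
            obj.close()
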
diff --git a/src/antlion/controllers/abstract_inst.py b/src/antlion/controllers/abstract_inst.py
deleted file mode 100644
index d55c3a5..0000000
--- a/src/antlion/controllers/abstract_inst.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Abstract Instrument Library."""
-
-import socket
-import requests
-from antlion import logger
-
-
-class SocketInstrumentError(Exception):
-    """Abstract Instrument Error Class, via Socket and SCPI."""
-
-    def __init__(self, error, command=None):
-        """Init method for Socket Instrument Error.
-
-        Args:
-            error: Exception error.
-            command: Additional information on command,
-                Type, Str.
-        """
-        super(SocketInstrumentError, self).__init__(error)
-        self._error_code = error
-        self._error_message = self._error_code
-        if command is not None:
-            self._error_message = 'Command {} returned the error: {}.'.format(
-                repr(command), repr(self._error_message))
-
-    def __str__(self):
-        return self._error_message
-
-
-class SocketInstrument(object):
-    """Abstract Instrument Class, via Socket and SCPI."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for Socket Instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        self._socket_timeout = 120
-        self._socket_buffer_size = 1024
-
-        self._ip_addr = ip_addr
-        self._ip_port = ip_port
-
-        self._escseq = '\n'
-        self._codefmt = 'utf-8'
-
-        self._logger = logger.create_tagged_trace_logger(
-            '%s:%s' % (self._ip_addr, self._ip_port))
-
-        self._socket = None
-
-    def _connect_socket(self):
-        """Init and Connect to socket."""
-        try:
-            self._socket = socket.create_connection(
-                (self._ip_addr, self._ip_port), timeout=self._socket_timeout)
-
-            infmsg = 'Opened Socket connection to {}:{} with handle {}.'.format(
-                repr(self._ip_addr), repr(self._ip_port), repr(self._socket))
-            self._logger.debug(infmsg)
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-    def _send(self, cmd):
-        """Send command via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        cmd_es = cmd + self._escseq
-
-        try:
-            self._socket.sendall(cmd_es.encode(self._codefmt))
-            self._logger.debug('Sent %r to %r:%r.', cmd, self._ip_addr,
-                               self._ip_port)
-
-        except socket.timeout:
-            errmsg = ('Socket timeout while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = ('Socket error while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while sending command {} '
-                      'to instrument.').format(repr(cmd), repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _recv(self):
-        """Receive response via Socket.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        resp = ''
-
-        try:
-            while True:
-                resp_tmp = self._socket.recv(self._socket_buffer_size)
-                resp_tmp = resp_tmp.decode(self._codefmt)
-                resp += resp_tmp
-                if len(resp_tmp) < self._socket_buffer_size:
-                    break
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while receiving response '
-                      'from instrument').format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-        resp = resp.rstrip(self._escseq)
-
-        self._logger.debug('Received %r from %r:%r.', resp, self._ip_addr,
-                           self._ip_port)
-
-        return resp
-
-    def _close_socket(self):
-        """Close Socket Instrument."""
-        if not self._socket:
-            return
-
-        try:
-            self._socket.shutdown(socket.SHUT_RDWR)
-            self._socket.close()
-            self._socket = None
-            self._logger.debug('Closed Socket Instrument %r:%r.',
-                               self._ip_addr, self._ip_port)
-
-        except Exception as err:
-            errmsg = 'Error {} while closing instrument.'.format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd + ';*OPC?')
-        resp = self._recv()
-        return resp
-
-
-class RequestInstrument(object):
-    """Abstract Instrument Class, via Request."""
-
-    def __init__(self, ip_addr):
-        """Init method for request instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, Str.
-        """
-        self._request_timeout = 120
-        self._request_protocol = 'http'
-        self._ip_addr = ip_addr
-        self._escseq = '\r\n'
-
-        self._logger = logger.create_tagged_trace_logger(self._ip_addr)
-
-    def _query(self, cmd):
-        """query instrument via request.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via request,
-                Type, Str.
-        """
-        request_cmd = '{}://{}/{}'.format(self._request_protocol,
-                                          self._ip_addr, cmd)
-        resp_raw = requests.get(request_cmd, timeout=self._request_timeout)
-
-        resp = resp_raw.text
-        for char_del in self._escseq:
-            resp = resp.replace(char_del, '')
-
-        self._logger.debug('Sent %r to %r, and get %r.', cmd, self._ip_addr,
-                           resp)
-
-        return resp
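
abstract_inst.py is deleted outright, so the SCPI-over-socket helper it provided
is gone. For code that still needs the same behavior, the core of the removed
_query() flow (terminate the command with '\n', append ';*OPC?', then read until
a short recv) is small enough to inline. A rough standalone sketch of that flow,
not a drop-in replacement for the removed class:

    import socket

    def scpi_query(host: str, port: int, cmd: str, timeout: float = 120.0) -> str:
        """Send one SCPI command and return the instrument's reply."""
        with socket.create_connection((host, port), timeout=timeout) as sock:
            sock.sendall(f"{cmd};*OPC?\n".encode("utf-8"))
            chunks = []
            while True:
                chunk = sock.recv(1024)
                chunks.append(chunk)
                if len(chunk) < 1024:
                    break
        return b"".join(chunks).decode("utf-8").rstrip("\n")
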
diff --git a/src/antlion/controllers/access_point.py b/src/antlion/controllers/access_point.py
index d9116ce..91a241d 100755
--- a/src/antlion/controllers/access_point.py
+++ b/src/antlion/controllers/access_point.py
@@ -14,188 +14,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import collections
 import ipaddress
-import os
 import time
 
-from typing import FrozenSet, Set, TYPE_CHECKING
+from dataclasses import dataclass
+from typing import Any, Dict, FrozenSet, List, Optional, Set, Tuple
 
 from antlion import logger
 from antlion import utils
-from antlion.controllers import pdu
-from antlion.controllers.ap_lib import ap_get_interface
-from antlion.controllers.ap_lib import ap_iwconfig
-from antlion.controllers.ap_lib import bridge_interface
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import dhcp_server
-from antlion.controllers.ap_lib import hostapd
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_config
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import radvd
-from antlion.controllers.ap_lib import radvd_config
+from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces
+from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig
+from antlion.controllers.ap_lib.bridge_interface import BridgeInterface
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
+from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError
 from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
+from antlion.controllers.ap_lib.hostapd import Hostapd
+from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.radvd import Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
 from antlion.controllers.utils_lib.commands import ip
 from antlion.controllers.utils_lib.commands import route
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import settings
 from antlion.libs.proc import job
 
-if TYPE_CHECKING:
-    from antlion.controllers.ap_lib.radvd import Radvd
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'AccessPoint'
-ACTS_CONTROLLER_REFERENCE_NAME = 'access_points'
-_BRCTL = 'brctl'
-
-LIFETIME = 180
-PROC_NET_SNMP6 = '/proc/net/snmp6'
-SCAPY_INSTALL_COMMAND = 'sudo python setup.py install'
-RA_MULTICAST_ADDR = '33:33:00:00:00:01'
-RA_SCRIPT = 'sendra.py'
-
-
-def create(configs):
-    """Creates ap controllers from a json config.
-
-    Creates an ap controller from either a list, or a single
-    element. The element can either be just the hostname or a dictionary
-    containing the hostname and username of the ap to connect to over ssh.
-
-    Args:
-        The json configs that represent this controller.
-
-    Returns:
-        A new AccessPoint.
-    """
-    return [AccessPoint(c) for c in configs]
-
-
-def destroy(aps):
-    """Destroys a list of access points.
-
-    Args:
-        aps: The list of access points to destroy.
-    """
-    for ap in aps:
-        ap.close()
-
-
-def get_info(aps):
-    """Get information on a list of access points.
-
-    Args:
-        aps: A list of AccessPoints.
-
-    Returns:
-        A list of all aps hostname.
-    """
-    return [ap.ssh_settings.hostname for ap in aps]
-
-
-def setup_ap(
-        access_point,
-        profile_name,
-        channel,
-        ssid,
-        mode=None,
-        preamble=None,
-        beacon_interval=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        hidden=False,
-        security=None,
-        pmf_support=None,
-        additional_ap_parameters=None,
-        password=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        setup_bridge=False,
-        is_ipv6_enabled=False,
-        is_nat_enabled=True):
-    """Creates a hostapd profile and runs it on an ap. This is a convenience
-    function that allows us to start an ap with a single function, without first
-    creating a hostapd config.
-
-    Args:
-        access_point: An ACTS access_point controller
-        profile_name: The profile name of one of the hostapd ap presets.
-        channel: What channel to set the AP to.
-        preamble: Whether to set short or long preamble (True or False)
-        beacon_interval: The beacon interval (int)
-        dtim_period: Length of dtim period (int)
-        frag_threshold: Fragmentation threshold (int)
-        rts_threshold: RTS threshold (int)
-        force_wmm: Enable WMM or not (True or False)
-        hidden: Advertise the SSID or not (True or False)
-        security: What security to enable.
-        pmf_support: int, whether pmf is not disabled, enabled, or required
-        additional_ap_parameters: Additional parameters to send the AP.
-        password: Password to connect to WLAN if necessary.
-        check_connectivity: Whether to check for internet connectivity.
-        wnm_features: WNM features to enable on the AP.
-        setup_bridge: Whether to bridge the LAN interface WLAN interface.
-            Only one WLAN interface can be bridged with the LAN interface
-            and none of the guest networks can be bridged.
-        is_ipv6_enabled: If True, start a IPv6 router advertisement daemon
-        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
-            to access the internet if the WAN port is connected to the internet.
-
-    Returns:
-        An identifier for each ssid being started. These identifiers can be
-        used later by this controller to control the ap.
-
-    Raises:
-        Error: When the ap can't be brought up.
-    """
-    ap = hostapd_ap_preset.create_ap_preset(profile_name=profile_name,
-                                            iface_wlan_2g=access_point.wlan_2g,
-                                            iface_wlan_5g=access_point.wlan_5g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            mode=mode,
-                                            short_preamble=preamble,
-                                            beacon_interval=beacon_interval,
-                                            dtim_period=dtim_period,
-                                            frag_threshold=frag_threshold,
-                                            rts_threshold=rts_threshold,
-                                            force_wmm=force_wmm,
-                                            hidden=hidden,
-                                            bss_settings=[],
-                                            security=security,
-                                            pmf_support=pmf_support,
-                                            n_capabilities=n_capabilities,
-                                            ac_capabilities=ac_capabilities,
-                                            vht_bandwidth=vht_bandwidth,
-                                            wnm_features=wnm_features)
-    return access_point.start_ap(
-        hostapd_config=ap,
-        radvd_config=radvd_config.RadvdConfig() if is_ipv6_enabled else None,
-        setup_bridge=setup_bridge,
-        is_nat_enabled=is_nat_enabled,
-        additional_parameters=additional_ap_parameters)
+MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint"
+ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
 
 
 class Error(Exception):
     """Error raised when there is a problem with the access point."""
 
 
-_ApInstance = collections.namedtuple('_ApInstance', ['hostapd', 'subnet'])
+@dataclass
+class _ApInstance:
+    hostapd: Hostapd
+    subnet: Subnet
+
 
 # These ranges were split this way since each physical radio can have up
 # to 8 SSIDs so for the 2GHz radio the DHCP range will be
 # 192.168.1 - 8 and the 5Ghz radio will be 192.168.9 - 16
-_AP_2GHZ_SUBNET_STR_DEFAULT = '192.168.1.0/24'
-_AP_5GHZ_SUBNET_STR_DEFAULT = '192.168.9.0/24'
+_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24"
+_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24"
 
 # The last digit of the ip for the bridge interface
-BRIDGE_IP_LAST = '100'
+BRIDGE_IP_LAST = "100"
 
 
 class AccessPoint(object):
@@ -207,48 +79,58 @@
         dhcp_settings: The dhcp server settings being used.
     """
 
-    def __init__(self, configs):
+    def __init__(self, configs: Dict[str, Any]) -> None:
         """
         Args:
             configs: configs for the access point from config file.
         """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
+        self.ssh_settings = settings.from_config(configs["ssh_config"])
         self.log = logger.create_logger(
-            lambda msg: f'[Access Point|{self.ssh_settings.hostname}] {msg}')
-        self.device_pdu_config = configs.get('PduDevice', None)
+            lambda msg: f"[Access Point|{self.ssh_settings.hostname}] {msg}"
+        )
+        self.device_pdu_config = configs.get("PduDevice", None)
         self.identifier = self.ssh_settings.hostname
 
-        if 'ap_subnet' in configs:
-            self._AP_2G_SUBNET_STR = configs['ap_subnet']['2g']
-            self._AP_5G_SUBNET_STR = configs['ap_subnet']['5g']
+        if "ap_subnet" in configs:
+            self._AP_2G_SUBNET_STR: str = configs["ap_subnet"]["2g"]
+            self._AP_5G_SUBNET_STR: str = configs["ap_subnet"]["5g"]
         else:
             self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT
             self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT
 
-        self._AP_2G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_2G_SUBNET_STR))
-        self._AP_5G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_5G_SUBNET_STR))
+        self._AP_2G_SUBNET = Subnet(ipaddress.ip_network(self._AP_2G_SUBNET_STR))
+        self._AP_5G_SUBNET = Subnet(ipaddress.ip_network(self._AP_5G_SUBNET_STR))
 
         self.ssh = connection.SshConnection(self.ssh_settings)
 
+        # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider
+        self.ssh_provider = SSHProvider(
+            SSHConfig(
+                self.ssh_settings.username,
+                self.ssh_settings.hostname,
+                self.ssh_settings.identity_file,
+                port=self.ssh_settings.port,
+                ssh_binary=self.ssh_settings.executable,
+                connect_timeout=90,
+            )
+        )
+
         # Singleton utilities for running various commands.
         self._ip_cmd = ip.LinuxIpCommand(self.ssh)
         self._route_cmd = route.LinuxRouteCommand(self.ssh)
 
         # A map from network interface name to _ApInstance objects representing
         # the hostapd instance running against the interface.
-        self._aps = dict()
-        self._dhcp = None
-        self._dhcp_bss = dict()
-        self._radvd: Radvd = None
-        self.bridge = bridge_interface.BridgeInterface(self)
-        self.iwconfig = ap_iwconfig.ApIwconfig(self)
+        self._aps: Dict[str, _ApInstance] = dict()
+        self._dhcp: Optional[DhcpServer] = None
+        self._dhcp_bss: Dict[Any, Subnet] = dict()
+        self._radvd: Optional[Radvd] = None
+        self.bridge = BridgeInterface(self)
+        self.iwconfig = ApIwconfig(self)
 
         # Check to see if wan_interface is specified in acts_config for tests
         # isolated from the internet and set this override.
-        self.interfaces = ap_get_interface.ApInterfaces(
-            self, configs.get('wan_interface'))
+        self.interfaces = ApInterfaces(self, configs.get("wan_interface"))
 
         # Get needed interface names and initialize the unnecessary ones.
         self.wan = self.interfaces.get_wan_interface()
@@ -257,10 +139,9 @@
         self.wlan_5g = self.wlan[1]
         self.lan = self.interfaces.get_lan_interface()
         self._initial_ap()
-        self.scapy_install_path = None
         self.setup_bridge = False
 
-    def _initial_ap(self):
+    def _initial_ap(self) -> None:
         """Initial AP interfaces.
 
         Bring down hostapd if instance is running, bring down all bridge
@@ -271,32 +152,34 @@
         # interfaces need to be brought down as part of the AP initialization
         # process, otherwise test would fail.
         try:
-            self.ssh.run('stop wpasupplicant')
+            self.ssh.run("stop wpasupplicant")
         except job.Error:
-            self.log.info('No wpasupplicant running')
+            self.log.info("No wpasupplicant running")
         try:
-            self.ssh.run('stop hostapd')
+            self.ssh.run("stop hostapd")
         except job.Error:
-            self.log.info('No hostapd running')
+            self.log.info("No hostapd running")
         # Bring down all wireless interfaces
         for iface in self.wlan:
-            WLAN_DOWN = f'ip link set {iface} down'
+            WLAN_DOWN = f"ip link set {iface} down"
             self.ssh.run(WLAN_DOWN)
         # Bring down all bridge interfaces
         bridge_interfaces = self.interfaces.get_bridge_interface()
         if bridge_interfaces:
             for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
+                BRIDGE_DOWN = f"ip link set {iface} down"
+                BRIDGE_DEL = f"brctl delbr {iface}"
                 self.ssh.run(BRIDGE_DOWN)
                 self.ssh.run(BRIDGE_DEL)
 
-    def start_ap(self,
-                 hostapd_config: hostapd_config.HostapdConfig,
-                 radvd_config=None,
-                 setup_bridge=False,
-                 is_nat_enabled=True,
-                 additional_parameters=None):
+    def start_ap(
+        self,
+        hostapd_config: HostapdConfig,
+        radvd_config: Optional[RadvdConfig] = None,
+        setup_bridge: bool = False,
+        is_nat_enabled: bool = True,
+        additional_parameters: Optional[Dict[str, Any]] = None,
+    ) -> List[Any]:
         """Starts as an ap using a set of configurations.
 
         This will start an ap on this host. To start an ap the controller
@@ -306,19 +189,17 @@
         for that subnet for any device that connects through that interface.
 
         Args:
-            hostapd_config: hostapd_config.HostapdConfig, The configurations
-                to use when starting up the ap.
-            radvd_config: radvd_config.RadvdConfig, The IPv6 configuration
-                to use when starting up the ap.
+            hostapd_config: The configurations to use when starting up the ap.
+            radvd_config: The IPv6 configuration to use when starting up the ap.
             setup_bridge: Whether to bridge the LAN interface WLAN interface.
                 Only one WLAN interface can be bridged with the LAN interface
                 and none of the guest networks can be bridged.
             is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
                 able to access the internet if the WAN port is connected to the
                 internet.
-            additional_parameters: A dictionary of parameters that can sent
-                directly into the hostapd config file.  This can be used for
-                debugging and or adding one off parameters into the config.
+            additional_parameters: Parameters that can be sent directly into
+                the hostapd config file. This can be used for debugging and/or
+                adding one-off parameters into the config.
 
         Returns:
             An identifier for each ssid being started. These identifiers can be
@@ -336,13 +217,13 @@
 
         # radvd requires the interface to have a IPv6 link-local address.
         if radvd_config:
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0')
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.forwarding=1')
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0")
+            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1")
 
         # In order to handle dhcp servers on any interface, the initiation of
         # the dhcp server must be done after the wlan interfaces are figured
         # out as opposed to being in __init__
-        self._dhcp = dhcp_server.DhcpServer(self.ssh, interface=interface)
+        self._dhcp = DhcpServer(self.ssh, interface=interface)
 
         # For multi bssid configurations the mac address
         # of the wireless interface needs to have enough space to mask out
@@ -352,16 +233,18 @@
         cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
         interface_mac_orig = self.ssh.run(cmd)
         if interface == self.wlan_5g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '0'
+            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "0"
             last_octet = 1
         if interface == self.wlan_2g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '8'
+            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "8"
             last_octet = 9
         if interface in self._aps:
-            raise ValueError('No WiFi interface available for AP on '
-                             f'channel {hostapd_config.channel}')
+            raise ValueError(
+                "No WiFi interface available for AP on "
+                f"channel {hostapd_config.channel}"
+            )
 
-        apd = hostapd.Hostapd(self.ssh, interface)
+        apd = Hostapd(self.ssh, interface)
         new_instance = _ApInstance(hostapd=apd, subnet=subnet)
         self._aps[interface] = new_instance
 
@@ -374,7 +257,7 @@
         # on the AP, but not for traffic handled by the Linux networking stack
         # such as ping.
         if radvd_config:
-            self._route_cmd.add_route(interface, 'fe80::/64')
+            self._route_cmd.add_route(interface, "fe80::/64")
 
         self._dhcp_bss = dict()
         if hostapd_config.bss_lookup:
@@ -390,15 +273,17 @@
             for bss in hostapd_config.bss_lookup:
                 if interface_mac_orig:
                     hostapd_config.bss_lookup[bss].bssid = (
-                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:])
+                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:]
+                    )
                 self._route_cmd.clear_routes(net_interface=str(bss))
                 if interface is self.wlan_2g:
                     starting_ip_range = self._AP_2G_SUBNET_STR
                 else:
                     starting_ip_range = self._AP_5G_SUBNET_STR
-                a, b, c, d = starting_ip_range.split('.')
-                self._dhcp_bss[bss] = dhcp_config.Subnet(
-                    ipaddress.ip_network(f'{a}.{b}.{int(c) + counter}.{d}'))
+                a, b, c, d = starting_ip_range.split(".")
+                self._dhcp_bss[bss] = Subnet(
+                    ipaddress.ip_network(f"{a}.{b}.{int(c) + counter}.{d}")
+                )
                 counter = counter + 1
                 last_octet = last_octet + 1
 
@@ -407,9 +292,10 @@
         # The DHCP serer requires interfaces to have ips and routes before
         # the server will come up.
         interface_ip = ipaddress.ip_interface(
-            f'{subnet.router}/{subnet.network.netmask}')
+            f"{subnet.router}/{subnet.network.netmask}"
+        )
         if setup_bridge is True:
-            bridge_interface_name = 'eth_test'
+            bridge_interface_name = "eth_test"
             self.create_bridge(bridge_interface_name, [interface, self.lan])
             self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
         else:
@@ -421,13 +307,13 @@
             # variables represent the interface name, k, and dhcp info, v.
             for k, v in self._dhcp_bss.items():
                 bss_interface_ip = ipaddress.ip_interface(
-                    f'{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}'
+                    f"{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}"
                 )
                 self._ip_cmd.set_ipv4_address(str(k), bss_interface_ip)
 
         # Restart the DHCP server with our updated list of subnets.
         configured_subnets = self.get_configured_subnets()
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=configured_subnets)
+        dhcp_conf = DhcpConfig(subnets=configured_subnets)
         self.start_dhcp(dhcp_conf=dhcp_conf)
         if is_nat_enabled:
             self.start_nat()
@@ -437,7 +323,7 @@
             self.enable_forwarding()
         if radvd_config:
             radvd_interface = bridge_interface_name if setup_bridge else interface
-            self._radvd = radvd.Radvd(self.ssh, radvd_interface)
+            self._radvd = Radvd(self.ssh, radvd_interface)
             self._radvd.start(radvd_config)
         else:
             self._radvd = None
@@ -447,40 +333,40 @@
 
         return bss_interfaces
 
-    def get_configured_subnets(self):
+    def get_configured_subnets(self) -> List[Subnet]:
         """Get the list of configured subnets on the access point.
 
         This allows consumers of the access point objects create custom DHCP
         configs with the correct subnets.
 
-        Returns: a list of dhcp_config.Subnet objects
+        Returns: a list of Subnet objects
         """
         configured_subnets = [x.subnet for x in self._aps.values()]
         for k, v in self._dhcp_bss.items():
             configured_subnets.append(v)
         return configured_subnets
 
-    def start_dhcp(self, dhcp_conf):
+    def start_dhcp(self, dhcp_conf: DhcpConfig) -> None:
         """Start a DHCP server for the specified subnets.
 
         This allows consumers of the access point objects to control DHCP.
 
         Args:
-            dhcp_conf: A dhcp_config.DhcpConfig object.
+            dhcp_conf: A DhcpConfig object.
 
         Raises:
             Error: Raised when a dhcp server error is found.
         """
         self._dhcp.start(config=dhcp_conf)
 
-    def stop_dhcp(self):
+    def stop_dhcp(self) -> None:
         """Stop DHCP for this AP object.
 
         This allows consumers of the access point objects to control DHCP.
         """
         self._dhcp.stop()
 
-    def get_dhcp_logs(self):
+    def get_dhcp_logs(self) -> Optional[str]:
         """Get DHCP logs for this AP object.
 
         This allows consumers of the access point objects to validate DHCP
@@ -494,7 +380,7 @@
             return self._dhcp.get_logs()
         return None
 
-    def get_hostapd_logs(self):
+    def get_hostapd_logs(self) -> Dict[str, str]:
         """Get hostapd logs for all interfaces on AP object.
 
         This allows consumers of the access point objects to validate hostapd
@@ -504,11 +390,10 @@
         """
         hostapd_logs = dict()
         for identifier in self._aps:
-            hostapd_logs[identifier] = self._aps.get(
-                identifier).hostapd.pull_logs()
+            hostapd_logs[identifier] = self._aps.get(identifier).hostapd.pull_logs()
         return hostapd_logs
 
-    def get_radvd_logs(self):
+    def get_radvd_logs(self) -> Optional[str]:
         """Get radvd logs for this AP object.
 
         This allows consumers of the access point objects to validate radvd
@@ -522,16 +407,16 @@
             return self._radvd.pull_logs()
         return None
 
-    def enable_forwarding(self):
+    def enable_forwarding(self) -> None:
         """Enable IPv4 and IPv6 forwarding on the AP.
 
         When forwarding is enabled, the access point is able to route IP packets
         between devices in the same subnet.
         """
-        self.ssh.run('echo 1 > /proc/sys/net/ipv4/ip_forward')
-        self.ssh.run('echo 1 > /proc/sys/net/ipv6/conf/all/forwarding')
+        self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward")
+        self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding")
 
-    def start_nat(self):
+    def start_nat(self) -> None:
         """Start NAT on the AP.
 
         This allows consumers of the access point objects to enable NAT
@@ -544,11 +429,10 @@
         # the WAN and LAN/WLAN ports.  This means anyone connecting to the
         # WLAN/LAN ports will be able to access the internet if the WAN port
         # is connected to the internet.
-        self.ssh.run('iptables -t nat -F')
-        self.ssh.run(
-            f'iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE')
+        self.ssh.run("iptables -t nat -F")
+        self.ssh.run(f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE")
 
-    def stop_nat(self):
+    def stop_nat(self) -> None:
         """Stop NAT on the AP.
 
         This allows consumers of the access point objects to disable NAT on the
@@ -557,9 +441,9 @@
         Note that this is currently a global setting, since we don't have
         per-interface masquerade rules.
         """
-        self.ssh.run('iptables -t nat -F')
+        self.ssh.run("iptables -t nat -F")
 
-    def create_bridge(self, bridge_name, interfaces):
+    def create_bridge(self, bridge_name: str, interfaces: List[str]) -> None:
         """Create the specified bridge and bridge the specified interfaces.
 
         Args:
@@ -568,14 +452,14 @@
         """
 
         # Create the bridge interface
-        self.ssh.run(f'brctl addbr {bridge_name}')
+        self.ssh.run(f"brctl addbr {bridge_name}")
 
         for interface in interfaces:
-            self.ssh.run(f'brctl addif {bridge_name} {interface}')
+            self.ssh.run(f"brctl addif {bridge_name} {interface}")
 
-        self.ssh.run(f'ip link set {bridge_name} up')
+        self.ssh.run(f"ip link set {bridge_name} up")
 
-    def remove_bridge(self, bridge_name):
+    def remove_bridge(self, bridge_name: str) -> None:
         """Removes the specified bridge
 
         Args:
@@ -587,15 +471,15 @@
         #
         # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn
         # down the bridge once, but we got called for each band.
-        result = self.ssh.run(f'brctl show {bridge_name}', ignore_status=True)
+        result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True)
 
         # If the bridge exists, we'll get an exit_status of 0, indicating
         # success, so we can continue and remove the bridge.
         if result.exit_status == 0:
-            self.ssh.run(f'ip link set {bridge_name} down')
-            self.ssh.run(f'brctl delbr {bridge_name}')
+            self.ssh.run(f"ip link set {bridge_name} down")
+            self.ssh.run(f"brctl delbr {bridge_name}")
 
-    def get_bssid_from_ssid(self, ssid, band):
+    def get_bssid_from_ssid(self, ssid: str, band: str) -> Optional[str]:
         """Gets the BSSID from a provided SSID
 
         Args:
@@ -611,8 +495,9 @@
         # Get the interface name associated with the given ssid.
         for interface in interfaces:
             iw_output = self.ssh.run(
-                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'")
-            if 'command failed: No such device' in iw_output.stderr:
+                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'"
+            )
+            if "command failed: No such device" in iw_output.stderr:
                 continue
             else:
                 # If the configured ssid is equal to the given ssid, we found
@@ -624,7 +509,7 @@
                     return iw_output.stdout
         return None
 
-    def stop_ap(self, identifier):
+    def stop_ap(self, identifier: str) -> None:
         """Stops a running ap on this controller.
 
         Args:
@@ -632,7 +517,7 @@
         """
 
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
 
         instance = self._aps.get(identifier)
 
@@ -640,7 +525,7 @@
             self._radvd.stop()
         try:
             self.stop_dhcp()
-        except dhcp_server.NoInterfaceError:
+        except NoInterfaceError:
             pass
         self.stop_nat()
         instance.hostapd.stop()
@@ -650,18 +535,18 @@
         bridge_interfaces = self.interfaces.get_bridge_interface()
         if bridge_interfaces:
             for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
+                BRIDGE_DOWN = f"ip link set {iface} down"
+                BRIDGE_DEL = f"brctl delbr {iface}"
                 self.ssh.run(BRIDGE_DOWN)
                 self.ssh.run(BRIDGE_DEL)
 
-    def stop_all_aps(self):
+    def stop_all_aps(self) -> None:
         """Stops all running aps on this device."""
 
         for ap in list(self._aps.keys()):
             self.stop_ap(ap)
 
-    def close(self):
+    def close(self) -> None:
         """Called to take down the entire access point.
 
         When called will stop all aps running on this host, shutdown the dhcp
@@ -672,7 +557,7 @@
             self.stop_all_aps()
         self.ssh.close()
 
-    def generate_bridge_configs(self, channel):
+    def generate_bridge_configs(self, channel: int) -> Tuple[str, Optional[str], str]:
         """Generate a list of configs for a bridge between LAN and WLAN.
 
         Args:
@@ -691,125 +576,65 @@
 
         iface_lan = self.lan
 
-        a, b, c, _ = subnet_str.strip('/24').split('.')
-        bridge_ip = f'{a}.{b}.{c}.{BRIDGE_IP_LAST}'
+        a, b, c, _ = subnet_str.strip("/24").split(".")
+        bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}"
 
-        configs = (iface_wlan, iface_lan, bridge_ip)
+        return (iface_wlan, iface_lan, bridge_ip)
 
-        return configs
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: Optional[Any] = None,
+    ) -> Dict[str, Any]:
+        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
+        return utils.ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
-    def install_scapy(self, scapy_path, send_ra_path):
-        """Install scapy
-
-        Args:
-            scapy_path: path where scapy tar file is located on server
-            send_ra_path: path where sendra path is located on server
-        """
-        self.scapy_install_path = self.ssh.run('mktemp -d').stdout.rstrip()
-        self.log.info(f'Scapy install path: {self.scapy_install_path}')
-        self.ssh.send_file(scapy_path, self.scapy_install_path)
-        self.ssh.send_file(send_ra_path, self.scapy_install_path)
-
-        scapy = os.path.join(self.scapy_install_path,
-                             scapy_path.split('/')[-1])
-
-        self.ssh.run(f'tar -xvf {scapy} -C {self.scapy_install_path}')
-        self.ssh.run(f'cd {self.scapy_install_path}; {SCAPY_INSTALL_COMMAND}')
-
-    def cleanup_scapy(self):
-        """ Cleanup scapy """
-        if self.scapy_install_path:
-            cmd = f'rm -rf {self.scapy_install_path}'
-            self.log.info(f'Cleaning up scapy {cmd}')
-            output = self.ssh.run(cmd)
-            self.scapy_install_path = None
-
-    def send_ra(self,
-                iface,
-                mac=RA_MULTICAST_ADDR,
-                interval=1,
-                count=None,
-                lifetime=LIFETIME,
-                rtt=0):
-        """Invoke scapy and send RA to the device.
-
-        Args:
-          iface: string of the WiFi interface to use for sending packets.
-          mac: string HWAddr/MAC address to send the packets to.
-          interval: int Time to sleep between consecutive packets.
-          count: int Number of packets to be sent.
-          lifetime: int original RA's router lifetime in seconds.
-          rtt: retrans timer of the RA packet
-        """
-        scapy_command = os.path.join(self.scapy_install_path, RA_SCRIPT)
-        options = f' -m {mac} -i {interval} -c {count} -l {lifetime} -in {iface} -rtt {rtt}'
-        cmd = scapy_command + options
-        self.log.info(f'Scapy cmd: {cmd}')
-        self.ssh.run(cmd)
-
-    def get_icmp6intype134(self):
-        """Read the value of Icmp6InType134 and return integer.
-
-        Returns:
-            Integer value >0 if grep is successful; 0 otherwise.
-        """
-        ra_count_str = self.ssh.run(
-            f'grep Icmp6InType134 {PROC_NET_SNMP6} || true').stdout
-        if ra_count_str:
-            return int(ra_count_str.split()[1])
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=56,
-             additional_ping_params=None):
-        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)
-        """
-        return utils.ping(self.ssh,
-                          dest_ip,
-                          count=count,
-                          interval=interval,
-                          timeout=timeout,
-                          size=size,
-                          additional_ping_params=additional_ping_params)
-
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=56,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip: str,
+        count: int = 1,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 56,
+        additional_ping_params: Optional[Any] = None,
+    ) -> bool:
         """Returns whether ap can ping dest_ip (see utils.can_ping)"""
-        return utils.can_ping(self.ssh,
-                              dest_ip,
-                              count=count,
-                              interval=interval,
-                              timeout=timeout,
-                              size=size,
-                              additional_ping_params=additional_ping_params)
+        return utils.can_ping(
+            self.ssh,
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
-    def hard_power_cycle(self,
-                         pdus,
-                         unreachable_timeout=30,
-                         ping_timeout=60,
-                         ssh_timeout=30,
-                         hostapd_configs=None):
+    def hard_power_cycle(
+        self,
+        pdus: List[PduDevice],
+        hostapd_configs: Optional[List[HostapdConfig]] = None,
+    ) -> None:
         """Kills, then restores power to AccessPoint, verifying it goes down and
         comes back online cleanly.
 
         Args:
-            pdus: list, PduDevices in the testbed
-            unreachable_timeout: int, time to wait for AccessPoint to become
-                unreachable
-            ping_timeout: int, time to wait for AccessPoint to responsd to pings
-            ssh_timeout: int, time to wait for AccessPoint to allow SSH
-            hostapd_configs (optional): list, containing hostapd settings. If
-                present, these networks will be spun up after the AP has
-                rebooted. This list can either contain HostapdConfig objects, or
-                    dictionaries with the start_ap params
+            pdus: PduDevices in the testbed.
+            hostapd_configs: Hostapd settings. If present, these networks will
+                be spun up after the AP has rebooted. This list can contain
+                either HostapdConfig objects or dictionaries with the start_ap
+                params
                     (i.e  { 'hostapd_config': <HostapdConfig>,
                             'setup_bridge': <bool>,
                             'additional_parameters': <dict> } ).
@@ -818,132 +643,223 @@
             ConnectionError, if AccessPoint fails to go offline or come back.
         """
         if not self.device_pdu_config:
-            raise Error('No PduDevice provided in AccessPoint config.')
+            raise Error("No PduDevice provided in AccessPoint config.")
 
         if hostapd_configs is None:
             hostapd_configs = []
 
-        self.log.info(f'Power cycling')
-        ap_pdu, ap_pdu_port = pdu.get_pdu_port_for_device(
-            self.device_pdu_config, pdus)
+        self.log.info(f"Power cycling")
+        ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus)
 
-        self.log.info(f'Killing power')
+        self.log.info(f"Killing power")
         ap_pdu.off(str(ap_pdu_port))
 
-        self.log.info('Verifying AccessPoint is unreachable.')
-        timeout = time.time() + unreachable_timeout
-        while time.time() < timeout:
-            if not utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint is unreachable as expected.')
-                break
-            else:
-                self.log.debug(
-                    'AccessPoint is still responding to pings. Retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Failed to bring down AccessPoint ({self.ssh_settings.hostname})'
-            )
+        self.log.info("Verifying AccessPoint is unreachable.")
+        self.ssh_provider.wait_until_unreachable()
+        self.log.info("AccessPoint is unreachable as expected.")
+
         self._aps.clear()
 
-        self.log.info(f'Restoring power')
+        self.log.info(f"Restoring power")
         ap_pdu.on(str(ap_pdu_port))
 
-        self.log.info('Waiting for AccessPoint to respond to pings.')
-        timeout = time.time() + ping_timeout
-        while time.time() < timeout:
-            if utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint responded to pings.')
-                break
-            else:
-                self.log.debug('AccessPoint is not responding to pings. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to respond to pings.')
-
-        self.log.info('Waiting for AccessPoint to allow ssh connection.')
-        timeout = time.time() + ssh_timeout
-        while time.time() < timeout:
-            try:
-                self.ssh.run('echo')
-            except connection.Error:
-                self.log.debug('AccessPoint is not allowing ssh connection. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('AccessPoint available via ssh.')
-                break
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to allow ssh connection.')
+        self.log.info("Waiting for AccessPoint to become available via SSH.")
+        self.ssh_provider.wait_until_reachable()
+        self.log.info("AccessPoint responded to SSH.")
 
         # Allow 5 seconds for OS to finish getting set up
         time.sleep(5)
         self._initial_ap()
-        self.log.info('Power cycled successfully')
+        self.log.info("Power cycled successfully")
 
         for settings in hostapd_configs:
-            if type(settings) == hostapd_config.HostapdConfig:
+            if type(settings) == HostapdConfig:
                 config = settings
                 setup_bridge = False
                 additional_parameters = None
 
             elif type(settings) == dict:
-                config = settings['hostapd_config']
-                setup_bridge = settings.get('setup_bridge', False)
-                additional_parameters = settings.get('additional_parameters',
-                                                     None)
+                config = settings["hostapd_config"]
+                setup_bridge = settings.get("setup_bridge", False)
+                additional_parameters = settings.get("additional_parameters", None)
             else:
                 raise TypeError(
-                    'Items in hostapd_configs list must either be '
-                    'hostapd.HostapdConfig objects or dictionaries.')
+                    "Items in hostapd_configs list must either be "
+                    "HostapdConfig objects or dictionaries."
+                )
 
-            self.log.info(f'Restarting network {config.ssid}')
-            self.start_ap(config,
-                          setup_bridge=setup_bridge,
-                          additional_parameters=additional_parameters)
+            self.log.info(f"Restarting network {config.ssid}")
+            self.start_ap(
+                config,
+                setup_bridge=setup_bridge,
+                additional_parameters=additional_parameters,
+            )
 
-    def channel_switch(self, identifier, channel_num):
+    def channel_switch(self, identifier: str, channel_num: int) -> None:
         """Switch to a different channel on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        self.log.info(f'channel switch to channel {channel_num}')
+        self.log.info(f"channel switch to channel {channel_num}")
         instance.hostapd.channel_switch(channel_num)
 
-    def get_current_channel(self, identifier):
+    def get_current_channel(self, identifier: str) -> int:
         """Find the current channel on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_current_channel()
 
-    def get_stas(self, identifier) -> Set[str]:
+    def get_stas(self, identifier: str) -> Set[str]:
         """Return MAC addresses of all associated STAs on the given AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_stas()
 
-    def get_sta_extended_capabilities(self, identifier,
-                                      sta_mac: str) -> ExtendedCapabilities:
+    def get_sta_extended_capabilities(
+        self, identifier: str, sta_mac: str
+    ) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
+            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
         return instance.hostapd.get_sta_extended_capabilities(sta_mac)
 
     def send_bss_transition_management_req(
-            self, identifier, sta_mac: str,
-            request: BssTransitionManagementRequest):
+        self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> job.Result:
         """Send a BSS Transition Management request to an associated STA."""
         if identifier not in list(self._aps.keys()):
-            raise ValueError('Invalid identifier {identifier} given')
+            raise ValueError("Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        return instance.hostapd.send_bss_transition_management_req(
-            sta_mac, request)
+        return instance.hostapd.send_bss_transition_management_req(sta_mac, request)
+
+
+def setup_ap(
+    access_point: AccessPoint,
+    profile_name: str,
+    channel: int,
+    ssid: str,
+    mode: Optional[str] = None,
+    preamble: Optional[bool] = None,
+    beacon_interval: Optional[int] = None,
+    dtim_period: Optional[int] = None,
+    frag_threshold: Optional[int] = None,
+    rts_threshold: Optional[int] = None,
+    force_wmm: Optional[bool] = None,
+    hidden: Optional[bool] = False,
+    security: Optional[Security] = None,
+    pmf_support: Optional[int] = None,
+    additional_ap_parameters: Optional[Dict[str, Any]] = None,
+    password: Optional[str] = None,
+    n_capabilities: Optional[List[Any]] = None,
+    ac_capabilities: Optional[List[Any]] = None,
+    vht_bandwidth: Optional[int] = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    setup_bridge: bool = False,
+    is_ipv6_enabled: bool = False,
+    is_nat_enabled: bool = True,
+) -> List[Any]:
+    """Creates a hostapd profile and runs it on an ap. This is a convenience
+    function that allows us to start an ap with a single function, without first
+    creating a hostapd config.
+
+    Args:
+        access_point: An ACTS access_point controller
+        profile_name: The profile name of one of the hostapd ap presets.
+        channel: What channel to set the AP to.
+        preamble: Whether to set short or long preamble
+        beacon_interval: The beacon interval
+        dtim_period: Length of dtim period
+        frag_threshold: Fragmentation threshold
+        rts_threshold: RTS threshold
+        force_wmm: Enable WMM or not
+        hidden: Advertise the SSID or not
+        security: What security to enable.
+        pmf_support: Whether pmf is not disabled, enabled, or required
+        additional_ap_parameters: Additional parameters to send the AP.
+        password: Password to connect to WLAN if necessary.
+        vht_bandwidth: VHT channel bandwidth to use, if any.
+        wnm_features: WNM features to enable on the AP.
+        setup_bridge: Whether to bridge the LAN interface and the WLAN
+            interface. Only one WLAN interface can be bridged with the LAN
+            interface and none of the guest networks can be bridged.
+        is_ipv6_enabled: If True, start an IPv6 router advertisement daemon.
+        is_nat_enabled: If True, start NAT on the AP to allow the DUT to access
+            the internet if the WAN port is connected to the internet.
+
+    Returns:
+        An identifier for each ssid being started. These identifiers can be
+        used later by this controller to control the ap.
+
+    Raises:
+        Error: When the ap can't be brought up.
+    """
+    ap = create_ap_preset(
+        profile_name=profile_name,
+        iface_wlan_2g=access_point.wlan_2g,
+        iface_wlan_5g=access_point.wlan_5g,
+        channel=channel,
+        ssid=ssid,
+        mode=mode,
+        short_preamble=preamble,
+        beacon_interval=beacon_interval,
+        dtim_period=dtim_period,
+        frag_threshold=frag_threshold,
+        rts_threshold=rts_threshold,
+        force_wmm=force_wmm,
+        hidden=hidden,
+        bss_settings=[],
+        security=security,
+        pmf_support=pmf_support,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_bandwidth=vht_bandwidth,
+        wnm_features=wnm_features,
+    )
+    return access_point.start_ap(
+        hostapd_config=ap,
+        radvd_config=RadvdConfig() if is_ipv6_enabled else None,
+        setup_bridge=setup_bridge,
+        is_nat_enabled=is_nat_enabled,
+        additional_parameters=additional_ap_parameters,
+    )
+
+
+def create(configs: Any) -> List[AccessPoint]:
+    """Creates ap controllers from a json config.
+
+    Creates an ap controller from either a list or a single element. The
+    element can be either just the hostname or a dictionary containing the
+    hostname and username of the ap to connect to over ssh.
+
+    Args:
+        configs: The json configs that represent this controller.
+
+    Returns:
+        A list of AccessPoint controllers.
+    """
+    return [AccessPoint(c) for c in configs]
+
+
+def destroy(aps: List[AccessPoint]) -> None:
+    """Destroys a list of access points.
+
+    Args:
+        aps: The list of access points to destroy.
+    """
+    for ap in aps:
+        ap.close()
+
+
+def get_info(aps: List[AccessPoint]) -> List[str]:
+    """Get information on a list of access points.
+
+    Args:
+        aps: A list of AccessPoints.
+
+    Returns:
+        A list of all aps hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in aps]
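Note: a minimal usage sketch of the setup_ap convenience helper added above, assuming an already-created AccessPoint controller; the import path, preset name, and all values below are illustrative assumptions, not part of this change:

    # Hypothetical test snippet (module path and preset name assumed).
    from antlion.controllers.access_point import setup_ap

    identifiers = setup_ap(
        access_point=access_point,   # an AccessPoint from create(...)
        profile_name="whirlwind",    # preset name is an assumption
        channel=6,
        ssid="test_network",
        setup_bridge=True,           # bridge the WLAN and LAN interfaces
        is_ipv6_enabled=True,        # also start an IPv6 RA daemon
    )
    # The returned identifiers can later be passed back to the AccessPoint
    # controller to manage each started network.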
diff --git a/src/antlion/controllers/adb.py b/src/antlion/controllers/adb.py
index 9acfa1e..5c3848d 100644
--- a/src/antlion/controllers/adb.py
+++ b/src/antlion/controllers/adb.py
@@ -26,19 +26,19 @@
 DEFAULT_ADB_TIMEOUT = 60
 DEFAULT_ADB_PULL_TIMEOUT = 180
 
-ADB_REGEX = re.compile('adb:')
+ADB_REGEX = re.compile("adb:")
 # Uses a regex to be backwards compatible with previous versions of ADB
 # (N and above add the serial to the error msg).
-DEVICE_NOT_FOUND_REGEX = re.compile('error: device (?:\'.*?\' )?not found')
-DEVICE_OFFLINE_REGEX = re.compile('error: device offline')
+DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found")
+DEVICE_OFFLINE_REGEX = re.compile("error: device offline")
 # Raised when adb forward commands fail to forward a port.
-CANNOT_BIND_LISTENER_REGEX = re.compile('error: cannot bind listener:')
+CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:")
 # Expected output is "Android Debug Bridge version 1.0.XX
-ADB_VERSION_REGEX = re.compile('Android Debug Bridge version 1.0.(\d+)')
-GREP_REGEX = re.compile('grep(\s+)')
+ADB_VERSION_REGEX = re.compile(r"Android Debug Bridge version 1.0.(\d+)")
+GREP_REGEX = re.compile(r"grep(\s+)")
 
-ROOT_USER_ID = '0'
-SHELL_USER_ID = '2000'
+ROOT_USER_ID = "0"
+SHELL_USER_ID = "2000"
 
 
 def parsing_parcel_output(output):
@@ -51,8 +51,8 @@
         0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
         0x00000030: 00000000                            '....            ')
     """
-    output = ''.join(re.findall(r"'(.*)'", output))
-    return re.sub(r'[.\s]', '', output)
+    output = "".join(re.findall(r"'(.*)'", output))
+    return re.sub(r"[.\s]", "", output)
 
 
 class AdbProxy(object):
@@ -75,7 +75,7 @@
         """
         self.serial = serial
         self._server_local_port = None
-        adb_path = shutil.which('adb')
+        adb_path = shutil.which("adb")
         adb_cmd = [shlex.quote(adb_path)]
         if serial:
             adb_cmd.append("-s %s" % serial)
@@ -87,8 +87,10 @@
             temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
             ssh_connection.send_file(adb_path, temp_dir)
             # Start up a new adb server running as root from the copied binary.
-            remote_adb_cmd = "%s/adb %s root" % (temp_dir, "-s %s" % serial
-                                                 if serial else "")
+            remote_adb_cmd = "%s/adb %s root" % (
+                temp_dir,
+                "-s %s" % serial if serial else "",
+            )
             ssh_connection.run(remote_adb_cmd)
             # Proxy a local port to the adb server port
             local_port = ssh_connection.create_ssh_tunnel(5037)
@@ -101,7 +103,7 @@
 
     def get_user_id(self):
         """Returns the adb user. Either 2000 (shell) or 0 (root)."""
-        return self.shell('id -u')
+        return self.shell("id -u")
 
     def is_root(self, user_id=None):
         """Checks if the user is root.
@@ -156,13 +158,19 @@
             AdbCommandError for errors from commands executed through ADB.
         """
         if isinstance(cmd, list):
-            cmd = ' '.join(cmd)
+            cmd = " ".join(cmd)
         result = job.run(cmd, ignore_status=True, timeout=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
 
-        if any(pattern.match(err) for pattern in
-               [ADB_REGEX, DEVICE_OFFLINE_REGEX, DEVICE_NOT_FOUND_REGEX,
-                CANNOT_BIND_LISTENER_REGEX]):
+        if any(
+            pattern.match(err)
+            for pattern in [
+                ADB_REGEX,
+                DEVICE_OFFLINE_REGEX,
+                DEVICE_NOT_FOUND_REGEX,
+                CANNOT_BIND_LISTENER_REGEX,
+            ]
+        ):
             raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
         if "Result: Parcel" in out:
             return parsing_parcel_output(out)
@@ -173,8 +181,7 @@
         return out
 
     def _exec_adb_cmd(self, name, arg_str, **kwargs):
-        return self._exec_cmd(' '.join((self.adb_str, name, arg_str)),
-                              **kwargs)
+        return self._exec_cmd(" ".join((self.adb_str, name, arg_str)), **kwargs)
 
     def _exec_cmd_nb(self, cmd, **kwargs):
         """Executes adb commands in a new shell, non blocking.
@@ -186,8 +193,7 @@
         return job.run_async(cmd, **kwargs)
 
     def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
-        return self._exec_cmd_nb(' '.join((self.adb_str, name, arg_str)),
-                                 **kwargs)
+        return self._exec_cmd_nb(" ".join((self.adb_str, name, arg_str)), **kwargs)
 
     def tcp_forward(self, host_port, device_port):
         """Starts tcp forwarding from localhost to this android device.
@@ -206,9 +212,11 @@
             #     device port
             remote_port = self._ssh_connection.find_free_port()
             host_port = self._ssh_connection.create_ssh_tunnel(
-                remote_port, local_port=host_port)
-        output = self.forward("tcp:%d tcp:%d" % (host_port, device_port),
-                              ignore_status=True)
+                remote_port, local_port=host_port
+            )
+        output = self.forward(
+            "tcp:%d tcp:%d" % (host_port, device_port), ignore_status=True
+        )
         # If hinted_port is 0, the output will be the selected port.
         # Otherwise, there will be no output upon successfully
         # forwarding the hinted port.
@@ -229,8 +237,9 @@
         if self._ssh_connection:
             remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
             if remote_port is None:
-                logging.warning("Cannot close unknown forwarded tcp port: %d",
-                                host_port)
+                logging.warning(
+                    "Cannot close unknown forwarded tcp port: %d", host_port
+                )
                 return
             # The actual port we need to disable via adb is on the remote host.
             host_port = remote_port
@@ -254,20 +263,18 @@
     # command.
     def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
         return self._exec_adb_cmd(
-            'shell',
-            shlex.quote(command),
-            ignore_status=ignore_status,
-            timeout=timeout)
+            "shell", shlex.quote(command), ignore_status=ignore_status, timeout=timeout
+        )
 
     def shell_nb(self, command):
-        return self._exec_adb_cmd_nb('shell', shlex.quote(command))
+        return self._exec_adb_cmd_nb("shell", shlex.quote(command))
 
     def __getattr__(self, name):
         def adb_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            if clean_name in ['pull', 'push', 'remount'] and 'timeout' not in kwargs:
-                kwargs['timeout'] = DEFAULT_ADB_PULL_TIMEOUT
-            arg_str = ' '.join(str(elem) for elem in args)
+            clean_name = name.replace("_", "-")
+            if clean_name in ["pull", "push", "remount"] and "timeout" not in kwargs:
+                kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT
+            arg_str = " ".join(str(elem) for elem in args)
             return self._exec_adb_cmd(clean_name, arg_str, **kwargs)
 
         return adb_call
@@ -282,7 +289,9 @@
         match = re.search(ADB_VERSION_REGEX, version_output)
 
         if not match:
-            logging.error('Unable to capture ADB version from adb version '
-                          'output: %s' % version_output)
-            raise AdbError('adb version', version_output, '', '')
+            logging.error(
+                "Unable to capture ADB version from adb version "
+                "output: %s" % version_output
+            )
+            raise AdbError("adb version", version_output, "", "")
         return int(match.group(1))
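Note: the __getattr__ hook in AdbProxy above turns attribute access into adb subcommands by swapping underscores for dashes, so no per-command methods are needed. A small illustration under assumed values (the serial and file paths are made up):

    # Hypothetical usage of the dynamic dispatch shown above.
    adb = AdbProxy(serial="ABC123")
    adb.wait_for_device()            # runs: adb -s ABC123 wait-for-device
    adb.push("local.txt /sdcard/")   # push/pull/remount default to the
                                     # 180 s DEFAULT_ADB_PULL_TIMEOUT
    adb.shell("ls /sdcard")          # shell arguments are shlex-quoted first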
diff --git a/src/antlion/controllers/adb_lib/error.py b/src/antlion/controllers/adb_lib/error.py
index 6fcae7d..9599214 100644
--- a/src/antlion/controllers/adb_lib/error.py
+++ b/src/antlion/controllers/adb_lib/error.py
@@ -28,8 +28,12 @@
         self.ret_code = ret_code
 
     def __str__(self):
-        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s"
-                ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
+        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s") % (
+            self.cmd,
+            self.ret_code,
+            self.stdout,
+            self.stderr,
+        )
 
 
 class AdbCommandError(AdbError):
diff --git a/src/antlion/controllers/amarisoft_lib/OWNERS b/src/antlion/controllers/amarisoft_lib/OWNERS
deleted file mode 100644
index edee4ef..0000000
--- a/src/antlion/controllers/amarisoft_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-markusliu@google.com
-mollychang@google.com
-angelayu@google.com
-zoeyliu@google.com
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py b/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
deleted file mode 100644
index 5386f81..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import json
-import logging
-from typing import Any, Mapping, Optional, Tuple
-
-from antlion.controllers.amarisoft_lib import ssh_utils
-import immutabledict
-import websockets
-
-_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/enb.cfg',
-    'mme': '/config/mme.cfg',
-    'ims': '/config/ims.cfg',
-    'mbms': '/config/mbmsgw.cfg',
-    'ots': '/config/ots.cfg'
-})
-
-
-class MessageFailureError(Exception):
-  """Raises an error when the message execution fail."""
-
-
-class AmariSoftClient(ssh_utils.RemoteClient):
-  """The SSH client class interacts with Amarisoft.
-
-    A simulator used to simulate the base station can output different signals
-    according to the network configuration settings.
-    For example: T Mobile NSA LTE band 66 + NR band 71.
-  """
-
-  async def _send_message_to_callbox(self, uri: str,
-                                     msg: str) -> Tuple[str, str]:
-    """Implements async function for send message to the callbox.
-
-    Args:
-      uri: The uri of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    async with websockets.connect(
-        uri, extra_headers={'origin': 'Test'}) as websocket:
-      await websocket.send(msg)
-      head = await websocket.recv()
-      body = await websocket.recv()
-    return head, body
-
-  def send_message(self, port: str, msg: str) -> Tuple[str, str]:
-    """Sends a message to the callbox.
-
-    Args:
-      port: The port of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    return asyncio.get_event_loop().run_until_complete(
-        self._send_message_to_callbox(f'ws://{self.host}:{port}/', msg))
-
-  def verify_response(self, func: str, head: str,
-                      body: str) -> Tuple[Mapping[str, Any], Mapping[str, Any]]:
-    """Makes sure there are no error messages in Amarisoft's response.
-
-    If a message produces an error, response will have an error string field
-    representing the error.
-    For example:
-      {
-        "message": "ready",
-        "message_id": <message id>,
-        "error": <error message>,
-        "type": "ENB",
-        "name: <name>,
-      }
-
-    Args:
-      func: The message send to Amarisoft.
-      head: Responsed message head.
-      body: Responsed message body.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       MessageFailureError: Raised when an error occurs in the response message.
-    """
-    loaded_head = json.loads(head)
-    loaded_body = json.loads(body)
-
-    if loaded_head.get('message') != 'ready':
-      raise MessageFailureError(
-          f'Fail to get response from callbox, message: {loaded_head["error"]}')
-    if 'error' in loaded_body:
-      raise MessageFailureError(
-          f'Fail to excute {func} with error message: {loaded_body["error"]}')
-    if loaded_body.get('message') != func:
-      raise MessageFailureError(
-          f'The message sent was {loaded_body["message"]} instead of {func}.')
-    return loaded_head, loaded_body
-
-  def lte_service_stop(self) -> None:
-    """Stops to output signal."""
-    self.run_cmd('systemctl stop lte')
-
-  def lte_service_start(self):
-    """Starts to output signal."""
-    self.run_cmd('systemctl start lte')
-
-  def lte_service_restart(self):
-    """Restarts to output signal."""
-    self.run_cmd('systemctl restart lte')
-
-  def lte_service_enable(self):
-    """lte service remains enable until next reboot."""
-    self.run_cmd('systemctl enable lte')
-
-  def lte_service_disable(self):
-    """lte service remains disable until next reboot."""
-    self.run_cmd('systemctl disable lte')
-
-  def lte_service_is_active(self) -> bool:
-    """Checks lte service is active or not.
-
-    Returns:
-      True if service active, False otherwise.
-    """
-    return not any('inactive' in line
-                   for line in self.run_cmd('systemctl is-active lte'))
-
-  def set_config_dir(self, cfg_type: str, path: str) -> None:
-    """Sets the path of target configuration file.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb ...etc.)
-      path: The path of target configuration. (e.g.
-        /root/lteenb-linux-2020-12-14)
-    """
-    path_old = self.get_config_dir(cfg_type)
-    if path != path_old:
-      logging.info('set new path %s (was %s)', path, path_old)
-      self.run_cmd(f'ln -sfn {path} /root/{cfg_type}')
-    else:
-      logging.info('path %s does not change.', path_old)
-
-  def get_config_dir(self, cfg_type: str) -> Optional[str]:
-    """Gets the path of target configuration.
-
-    Args:
-      cfg_type: Target configuration type. (e.g. mme, enb...etc.)
-
-    Returns:
-      The path of configuration.
-    """
-    result = self.run_cmd(f'readlink /root/{cfg_type}')
-    if result:
-      path = result[0].strip()
-    else:
-      logging.warning('%s path not found.', cfg_type)
-      return None
-    return path
-
-  def set_config_file(self, cfg_type: str, cfg_file: str) -> None:
-    """Sets the configuration to be executed.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-      cfg_file: The configuration to be executed. (e.g.
-        /root/lteenb-linux-2020-12-14/config/gnb.cfg )
-
-    Raises:
-      FileNotFoundError: Raised when a file or directory is requested but
-      doesn’t exist.
-    """
-    cfg_link = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if not self.is_file_exist(cfg_file):
-      raise FileNotFoundError("The command file doesn't exist")
-    self.run_cmd(f'ln -sfn {cfg_file} {cfg_link}')
-
-  def get_config_file(self, cfg_type: str) -> Optional[str]:
-    """Gets the current configuration of specific configuration type.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-
-    Returns:
-      The current configuration with absolute path.
-    """
-    cfg_path = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if cfg_path:
-      result = self.run_cmd(f'readlink {cfg_path}')
-      if result:
-        return result[0].strip()
-
-  def get_all_config_dir(self) -> Mapping[str, str]:
-    """Gets all configuration directions.
-
-    Returns:
-      All configuration directions.
-    """
-    config_dir = {}
-    for cfg_type in ('ots', 'enb', 'mme', 'mbms'):
-      config_dir[cfg_type] = self.get_config_dir(cfg_type)
-      logging.debug('get path of %s: %s', cfg_type, config_dir[cfg_type])
-    return config_dir
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py b/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
deleted file mode 100644
index c62bf2a..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""Constants for test."""

-

-

-# ports of lte service websocket interface

-class PortNumber:

-  URI_MME = '9000'

-  URI_ENB = '9001'

-  URI_UE = '9002'

-  URI_IMS = '9003'

-  URI_MBMS = '9004'

-  URI_PROBE = '9005'

-  URI_LICENSE = '9006'

-  URI_MON = '9007'

-  URI_VIEW = '9008'

diff --git a/src/antlion/controllers/amarisoft_lib/config_utils.py b/src/antlion/controllers/amarisoft_lib/config_utils.py
deleted file mode 100644
index 8d3b603..0000000
--- a/src/antlion/controllers/amarisoft_lib/config_utils.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import os
-import immutabledict
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-TEMPLATE_PATH = os.path.dirname(os.path.abspath(__file__)) + '/config_templates'
-TEMPLATE_PATH_ENB = f'{TEMPLATE_PATH}/enb/'
-TEMPLATE_PATH_MME = f'{TEMPLATE_PATH}/mme/'
-
-_CLIENT_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/mhtest_enb.cfg',
-    'mme': '/config/mhtest_mme.cfg',
-})
-
-
-class EnbCfg():
-  """MME configuration templates."""
-  ENB_GENERIC = 'enb-single-generic.cfg'
-  GNB_NSA_GENERIC = 'gnb-nsa-lte-ho-generic.cfg'
-  GNB_SA_GENERIC = 'gnb-sa-lte-ho-generic.cfg'
-
-
-class MmeCfg():
-  """MME configuration templates."""
-  MME_GENERIC = 'mme-generic.cfg'
-
-
-class SpecTech(enum.Enum):
-  """Spectrum usage techniques."""
-  FDD = 0
-  TDD = 1
-
-
-class ConfigUtils():
-  """Utilities for set Amarisoft configs.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def upload_enb_template(self, cfg: str) -> bool:
-    """Loads ENB configuration.
-
-    Args:
-      cfg: The ENB configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_ENB + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('enb', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def upload_mme_template(self, cfg: str) -> bool:
-    """Loads MME configuration.
-
-    Args:
-      cfg: The MME configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_MME + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('mme', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def enb_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in ENB configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex: 311480
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def mme_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in MME configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex:'311480'
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_fdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the FDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define FDD_CELL_earfcn 1400'
-    string_to = f'#define FDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_tdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the TDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD_CELL_earfcn 40620'
-    string_to = f'#define TDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_spectrum_tech(self, tech: int) -> bool:
-    """Sets the spectrum usage techniques in ENB configuration.
-
-    Args:
-      tech: the spectrum usage techniques. ex: SpecTech.FDD.name
-
-    Returns:
-      True if set spectrum usage techniques successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD 0'
-    string_to = f'#define TDD {tech}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
diff --git a/src/antlion/controllers/amarisoft_lib/ims.py b/src/antlion/controllers/amarisoft_lib/ims.py
deleted file mode 100644
index ee575c4..0000000
--- a/src/antlion/controllers/amarisoft_lib/ims.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-from typing import Any, Mapping, Optional, Union
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-
-
-class ImsFunctions():
-  """Utilities for Amarisoft's IMS Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def make_call(self,
-              impi: str,
-              impu: str,
-              contact: str,
-              sip_file: str = 'mt_call_qos.sdp',
-              caller: str = 'Amarisoft',
-              duration: int = 30) -> None:
-    """Performs MT call from callbox to test device.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call.
-      impu: IMPU (IP Multimedia Public identity) of user to call.
-      contact: Contact SIP uri of user to call.
-      sip_file: Define file to use as sdp.
-      caller: The number/ID is displayed as the caller.
-      duration: If set, call duration in seconds (The server will close the
-        dialog).
-    """
-    msg = {}
-    msg['message'] = 'mt_call'
-    msg['impi'] = impi
-    msg['impu'] = impu
-    msg['contact'] = contact
-    msg['sip_file'] = sip_file
-    msg['caller'] = caller
-    msg['duration'] = duration
-    dump_msg = json.dumps(msg)
-    logging.debug('mt_call dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mt_call', head, body)
-
-  def send_sms(self,
-               text: str,
-               impi: str,
-               sender: Optional[str] = 'Amarisoft') -> None:
-    """Sends SMS to assigned device which connect to Amarisoft.
-
-    Args:
-      text: SMS text to send.
-      impi: IMPI (IP Multimedia Private identity) of user.
-      sender: Sets SMS sender.
-    """
-    msg = {}
-    msg['message'] = 'sms'
-    msg['text'] = text
-    msg['impi'] = impi
-    msg['sender'] = sender
-    dump_msg = json.dumps(msg)
-    logging.debug('send_sms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('sms', head, body)
-
-  def send_mms(self, filename: str, sender: str, receiver: str) -> None:
-    """Sends MMS to assigned device which connect to Amarisoft.
-
-    Args:
-      filename: File name with absolute path to send. Extensions jpg, jpeg, png,
-        gif and txt are supported.
-      sender: IMPI (IP Multimedia Private identity) of user.
-      receiver: IMPU (IP Multimedia Public identity) of user.
-    """
-    msg = {}
-    msg['message'] = 'mms'
-    msg['filename'] = filename
-    msg['sender'] = sender
-    msg['receiver'] = receiver
-    dump_msg = json.dumps(msg)
-    logging.debug('send_mms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mms', head, body)
-
-  def users_get(self, registered_only: bool = True) -> Mapping[str, Any]:
-    """Gets users state.
-
-    Args:
-      registered_only: If set, only registered user will be dumped.
-
-    Returns:
-      The user information.
-    """
-    msg = {}
-    msg['message'] = 'users_get'
-    msg['registered_only'] = registered_only
-    dump_msg = json.dumps(msg)
-    logging.debug('users_get dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    _, loaded_body = self.remote.verify_response('users_get', head, body)
-    return loaded_body
-
-  def get_impu(self, impi) -> Union[str, None]:
-    """Obtains the IMPU of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The IMPU of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        impu = body['users'][index]['bindings'][0]['impu'][1]
-        return impu
-    return None
-
-  def get_uri(self, impi) -> Union[str, None]:
-    """Obtains the URI of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The URI of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        uri = body['users'][index]['bindings'][0]['uri']
-        return uri
-    return None
diff --git a/src/antlion/controllers/amarisoft_lib/mme.py b/src/antlion/controllers/amarisoft_lib/mme.py
deleted file mode 100644
index 6f7ee42..0000000
--- a/src/antlion/controllers/amarisoft_lib/mme.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-
-class MmeFunctions():
-  """Utilities for Amarisoft's MME Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def pws_write(self, local_id: str, n50: bool = False):
-    """Broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_write'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_write dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_write', head, body)
-
-  def pws_kill(self, local_id: str, n50: bool = False):
-    """Stops broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_kill'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_kill dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_kill', head, body)
-
-  def ue_del(self, imsi: str):
-    """Remove UE from the UE database and force disconnect if necessary.
-
-    Args:
-      imsi: IMSI of the UE to delete.
-    """
-    msg = {}
-    msg['message'] = 'ue_del'
-    msg['imsi'] = imsi
-    dump_msg = json.dumps(msg)
-    logging.debug('ue_del dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('ue_del', head, body)
diff --git a/src/antlion/controllers/amarisoft_lib/ssh_utils.py b/src/antlion/controllers/amarisoft_lib/ssh_utils.py
deleted file mode 100644
index fccc1d7..0000000
--- a/src/antlion/controllers/amarisoft_lib/ssh_utils.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import Sequence
-
-import paramiko
-
-COMMAND_RETRY_TIMES = 3
-
-
-class RunCommandError(Exception):
-  """Raises an error when run command fail."""
-
-
-class NotConnectedError(Exception):
-  """Raises an error when run command without SSH connect."""
-
-
-class RemoteClient:
-  """The SSH client class interacts with the test machine.
-
-  Attributes:
-    host: A string representing the IP address of amarisoft.
-    port: A string representing the default port of SSH.
-    username: A string representing the username of amarisoft.
-    password: A string representing the password of amarisoft.
-    ssh: A SSH client.
-    sftp: A SFTP client.
-  """
-
-  def __init__(self,
-               host: str,
-               username: str,
-               password: str,
-               port: str = '22') -> None:
-    self.host = host
-    self.port = port
-    self.username = username
-    self.password = password
-    self.ssh = paramiko.SSHClient()
-    self.sftp = None
-
-  def ssh_is_connected(self) -> bool:
-    """Checks SSH connect or not.
-
-    Returns:
-      True if SSH is connected, False otherwise.
-    """
-    return self.ssh and self.ssh.get_transport().is_active()
-
-  def ssh_close(self) -> bool:
-    """Closes the SSH connection.
-
-    Returns:
-      True if ssh session closed, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      if self.ssh_is_connected():
-        self.ssh.close()
-      else:
-        return True
-    return False
-
-  def connect(self) -> bool:
-    """Creats SSH connection.
-
-    Returns:
-      True if success, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      try:
-        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.host, self.port, self.username, self.password)
-        self.ssh.get_transport().set_keepalive(1)
-        self.sftp = paramiko.SFTPClient.from_transport(self.ssh.get_transport())
-        return True
-      except Exception:  # pylint: disable=broad-except
-        self.ssh_close()
-    return False
-
-  def run_cmd(self, cmd: str) -> Sequence[str]:
-    """Runs shell command.
-
-    Args:
-      cmd: Command to be executed.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       RunCommandError: Raise error when command failed.
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-
-    logging.debug('ssh remote -> %s', cmd)
-    _, stdout, stderr = self.ssh.exec_command(cmd)
-    err = stderr.readlines()
-    if err:
-      logging.error('command failed.')
-      raise RunCommandError(err)
-    return stdout.readlines()
-
-  def is_file_exist(self, file: str) -> bool:
-    """Checks target file exist.
-
-    Args:
-        file: Target file with absolute path.
-
-    Returns:
-        True if file exist, false otherwise.
-    """
-    return any('exist' in line for line in self.run_cmd(
-        f'if [ -f "{file}" ]; then echo -e "exist"; fi'))
-
-  def sftp_upload(self, src: str, dst: str) -> bool:
-    """Uploads a local file to remote side.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file with file name.
-      For example:
-        upload('/usr/local/google/home/zoeyliu/Desktop/sample_config.yml',
-        '/root/sample_config.yml')
-
-    Returns:
-      True if file upload success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[local] %s -> [remote] %s', src, dst)
-    self.sftp.put(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_download(self, src: str, dst: str) -> bool:
-    """Downloads a file to local.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file.
-
-    Returns:
-      True if file download success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[remote] %s -> [local] %s', src, dst)
-    self.sftp.get(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_list_dir(self, path: str) -> Sequence[str]:
-    """Lists the names of the entries in the given path.
-
-    Args:
-      path: The path of the list.
-
-    Returns:
-      The names of the entries in the given path.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-    return sorted(self.sftp.listdir(path))
-
diff --git a/src/antlion/controllers/android_device.py b/src/antlion/controllers/android_device.py
index 1668d82..0eb0969 100755
--- a/src/antlion/controllers/android_device.py
+++ b/src/antlion/controllers/android_device.py
@@ -53,11 +53,20 @@
 ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
 ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
 ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-CRASH_REPORT_PATHS = ("/data/tombstones/", "/data/vendor/ramdump/",
-                      "/data/ramdump/", "/data/vendor/ssrdump",
-                      "/data/vendor/ramdump/bluetooth", "/data/vendor/log/cbd")
-CRASH_REPORT_SKIPS = ("RAMDUMP_RESERVED", "RAMDUMP_STATUS", "RAMDUMP_OUTPUT",
-                      "bluetooth")
+CRASH_REPORT_PATHS = (
+    "/data/tombstones/",
+    "/data/vendor/ramdump/",
+    "/data/ramdump/",
+    "/data/vendor/ssrdump",
+    "/data/vendor/ramdump/bluetooth",
+    "/data/vendor/log/cbd",
+)
+CRASH_REPORT_SKIPS = (
+    "RAMDUMP_RESERVED",
+    "RAMDUMP_STATUS",
+    "RAMDUMP_OUTPUT",
+    "bluetooth",
+)
 ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
 DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
 DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
@@ -72,7 +81,7 @@
 WAIT_FOR_DEVICE_TIMEOUT = 180
 ENCRYPTION_WINDOW = "CryptKeeper"
 DEFAULT_DEVICE_PASSWORD = "1111"
-RELEASE_ID_REGEXES = [re.compile(r'\w+\.\d+\.\d+'), re.compile(r'N\w+')]
+RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
 
 
 def create(configs):
@@ -90,8 +99,7 @@
     elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
         ads = get_all_instances()
     elif not isinstance(configs, list):
-        raise errors.AndroidDeviceConfigError(
-            ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
+        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
     elif isinstance(configs[0], str):
         # Configs is a list of serials.
         ads = get_instances(configs)
@@ -104,9 +112,10 @@
     for ad in ads:
         if not ad.is_connected():
             raise errors.AndroidDeviceError(
-                ("Android device %s is specified in config"
-                 " but is not attached.") % ad.serial,
-                serial=ad.serial)
+                ("Android device %s is specified in config" " but is not attached.")
+                % ad.serial,
+                serial=ad.serial,
+            )
     _start_services_on_ads(ads)
     for ad in ads:
         if ad.droid:
@@ -160,7 +169,7 @@
         try:
             ad.start_services()
         except:
-            ad.log.exception('Failed to start some services, abort!')
+            ad.log.exception("Failed to start some services, abort!")
             destroy(running_ads)
             raise
 
@@ -231,46 +240,50 @@
     results = []
     for c in configs:
         try:
-            serial = c.pop('serial')
+            serial = c.pop("serial")
         except KeyError:
             raise errors.AndroidDeviceConfigError(
-                "Required value 'serial' is missing in AndroidDevice config %s."
-                % c)
+                "Required value 'serial' is missing in AndroidDevice config %s." % c
+            )
         client_port = 0
         if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
             try:
                 client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c))
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c)
+                )
         server_port = None
         if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c:
             try:
                 server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c))
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c)
+                )
         forwarded_port = 0
         if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c:
             try:
-                forwarded_port = int(
-                    c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
+                forwarded_port = int(c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
             except ValueError:
                 raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c))
-        ssh_config = c.pop('ssh_config', None)
+                    "'%s' is not a valid number for config %s"
+                    % (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c)
+                )
+        ssh_config = c.pop("ssh_config", None)
         ssh_connection = None
         if ssh_config is not None:
             ssh_settings = settings.from_config(ssh_config)
             ssh_connection = connection.SshConnection(ssh_settings)
-        ad = AndroidDevice(serial,
-                           ssh_connection=ssh_connection,
-                           client_port=client_port,
-                           forwarded_port=forwarded_port,
-                           server_port=server_port)
+        ad = AndroidDevice(
+            serial,
+            ssh_connection=ssh_connection,
+            client_port=client_port,
+            forwarded_port=forwarded_port,
+            server_port=server_port,
+        )
         ad.load_config(c)
         results.append(ad)
     return results
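Note: a sketch of the per-device config shape consumed by the loader above; only the "serial" and "ssh_config" keys appear literally in this change, the SL4A port keys are referenced via constants, and every concrete value here is invented for illustration:

    # Hypothetical AndroidDevice configs passed to create().
    configs = [
        {
            "serial": "ABC123",     # required; popped before load_config()
            # "ssh_config": {...},  # optional; becomes an SshConnection for
            #                       # devices reached through a remote host
        },
    ]
    ads = create(configs)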
@@ -342,8 +355,8 @@
     filtered = filter_devices(ads, _get_device_filter)
     if not filtered:
         raise ValueError(
-            "Could not find a target device that matches condition: %s." %
-            kwargs)
+            "Could not find a target device that matches condition: %s." % kwargs
+        )
     elif len(filtered) == 1:
         return filtered[0]
     else:
@@ -397,31 +410,32 @@
 
     """
 
-    def __init__(self,
-                 serial='',
-                 ssh_connection=None,
-                 client_port=0,
-                 forwarded_port=0,
-                 server_port=None):
+    def __init__(
+        self,
+        serial="",
+        ssh_connection=None,
+        client_port=0,
+        forwarded_port=0,
+        server_port=None,
+    ):
         self.serial = serial
         # logging.log_path only exists when this is used in an ACTS test run.
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_dir = 'AndroidDevice%s' % serial
+        log_path_base = getattr(logging, "log_path", "/tmp/logs")
+        self.log_dir = "AndroidDevice%s" % serial
         self.log_path = os.path.join(log_path_base, self.log_dir)
         self.client_port = client_port
         self.forwarded_port = forwarded_port
         self.server_port = server_port
         self.log = tracelogger.TraceLogger(
-            AndroidDeviceLoggerAdapter(logging.getLogger(),
-                                       {'serial': serial}))
+            AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
+        )
         self._event_dispatchers = {}
         self._services = []
         self.register_service(services.AdbLogcatService(self))
         self.register_service(services.Sl4aService(self))
         self.adb_logcat_process = None
         self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
-        self.fastboot = fastboot.FastbootProxy(serial,
-                                               ssh_connection=ssh_connection)
+        self.fastboot = fastboot.FastbootProxy(serial, ssh_connection=ssh_connection)
         if not self.is_bootloader:
             self.root_adb()
         self._ssh_connection = ssh_connection
@@ -474,7 +488,7 @@
         self.start_services()
 
     def register_service(self, service):
-        """Registers the service on the device. """
+        """Registers the service on the device."""
         service.register()
         self._services.append(service)
 
@@ -499,8 +513,9 @@
 
         Stop adb logcat and terminate sl4a sessions if exist.
         """
-        event_bus.post(android_events.AndroidStopServicesEvent(self),
-                       ignore_errors=True)
+        event_bus.post(
+            android_events.AndroidStopServicesEvent(self), ignore_errors=True
+        )
 
     def is_connected(self):
         out = self.adb.devices()
@@ -519,8 +534,7 @@
             device is in bootloader mode.
         """
         if self.is_bootloader:
-            self.log.error("Device is in fastboot mode, could not get build "
-                           "info.")
+            self.log.error("Device is in fastboot mode, could not get build " "info.")
             return
 
         build_id = self.adb.getprop("ro.build.id")
@@ -536,7 +550,7 @@
         info = {
             "build_id": build_id,
             "incremental_build_id": incremental_build_id,
-            "build_type": self.adb.getprop("ro.build.type")
+            "build_type": self.adb.getprop("ro.build.type"),
         }
         return info
 
@@ -548,11 +562,11 @@
         can be added via `add_device_info`.
         """
         info = {
-            'serial': self.serial,
-            'model': self.model,
-            'build_info': self.build_info,
-            'user_added_info': self._user_added_device_info,
-            'flavor': self.flavor
+            "serial": self.serial,
+            "model": self.model,
+            "build_info": self.build_info,
+            "user_added_info": self._user_added_device_info,
+            "flavor": self.flavor,
         }
         return info
 
@@ -571,23 +585,19 @@
         if self._sdk_api_level is not None:
             return self._sdk_api_level
         if self.is_bootloader:
-            self.log.error(
-                'Device is in fastboot mode. Cannot get build info.')
+            self.log.error("Device is in fastboot mode. Cannot get build info.")
             return
-        self._sdk_api_level = int(
-            self.adb.shell('getprop ro.build.version.sdk'))
+        self._sdk_api_level = int(self.adb.shell("getprop ro.build.version.sdk"))
         return self._sdk_api_level
 
     @property
     def is_bootloader(self):
-        """True if the device is in bootloader mode.
-        """
+        """True if the device is in bootloader mode."""
         return self.serial in list_fastboot_devices()
 
     @property
     def is_adb_root(self):
-        """True if adb is running as root for this device.
-        """
+        """True if adb is running as root for this device."""
         try:
             return "0" == self.adb.shell("id -u")
         except AdbError:
@@ -603,9 +613,9 @@
             out = self.fastboot.getvar("product").strip()
             # "out" is never empty because of the "total time" message fastboot
             # writes to stderr.
-            lines = out.split('\n', 1)
+            lines = out.split("\n", 1)
             if lines:
-                tokens = lines[0].split(' ')
+                tokens = lines[0].split(" ")
                 if len(tokens) > 1:
                     return tokens[1].lower()
             return None
@@ -634,8 +644,7 @@
         """Returns the event dispatcher of the first Sl4aSession created."""
         if len(self._sl4a_manager.sessions) > 0:
             session_id = sorted(self._sl4a_manager.sessions.keys())[0]
-            return self._sl4a_manager.sessions[
-                session_id].get_event_dispatcher()
+            return self._sl4a_manager.sessions[session_id].get_event_dispatcher()
         else:
             return None
 
@@ -646,8 +655,7 @@
 
     @property
     def is_adb_logcat_on(self):
-        """Whether there is an ongoing adb logcat collection.
-        """
+        """Whether there is an ongoing adb logcat collection."""
         if self.adb_logcat_process:
             if self.adb_logcat_process.is_running():
                 return True
@@ -656,7 +664,7 @@
                 # if logcat died due to device reboot and sl4a session has
                 # not restarted there is no droid.
                 if self.droid:
-                    self.droid.logI('Logcat died')
+                    self.droid.logI("Logcat died")
                 self.log.info("Logcat to %s died", self.log_path)
                 return False
         return False
@@ -686,9 +694,9 @@
             # skip_sl4a value can be reset from config file
             if hasattr(self, k) and k != "skip_sl4a":
                 raise errors.AndroidDeviceError(
-                    "Attempting to set existing attribute %s on %s" %
-                    (k, self.serial),
-                    serial=self.serial)
+                    "Attempting to set existing attribute %s on %s" % (k, self.serial),
+                    serial=self.serial,
+                )
             setattr(self, k, v)
 
     def root_adb(self):
@@ -702,7 +710,7 @@
 
         for attempt in range(ADB_ROOT_RETRY_COUNT):
             try:
-                self.log.debug('Enabling ADB root mode: attempt %d.' % attempt)
+                self.log.debug("Enabling ADB root mode: attempt %d." % attempt)
                 self.adb.root()
             except AdbError:
                 if attempt == ADB_ROOT_RETRY_COUNT:
@@ -737,12 +745,15 @@
             >>> droid, ed = ad.get_droid()
         """
         self.log.debug(
-            "Creating RPC client_port={}, forwarded_port={}, server_port={}".
-            format(self.client_port, self.forwarded_port, self.server_port))
+            "Creating RPC client_port={}, forwarded_port={}, server_port={}".format(
+                self.client_port, self.forwarded_port, self.server_port
+            )
+        )
         session = self._sl4a_manager.create_session(
             client_port=self.client_port,
             forwarded_port=self.forwarded_port,
-            server_port=self.server_port)
+            server_port=self.server_port,
+        )
         droid = session.rpc_client
         if handle_event:
             ed = session.get_event_dispatcher()
@@ -762,24 +773,31 @@
         """
         for cmd in ("ps -A", "ps"):
             try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
+                out = self.adb.shell(
+                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                )
                 if package_name not in out:
                     continue
                 try:
                     pid = int(out.split()[1])
-                    self.log.info('apk %s has pid %s.', package_name, pid)
+                    self.log.info("apk %s has pid %s.", package_name, pid)
                     return pid
                 except (IndexError, ValueError) as e:
                     # Possible ValueError from string to int cast.
                     # Possible IndexError from split.
                     self.log.warning(
-                        'Command \"%s\" returned output line: '
-                        '\"%s\".\nError: %s', cmd, out, e)
+                        'Command "%s" returned output line: ' '"%s".\nError: %s',
+                        cmd,
+                        out,
+                        e,
+                    )
             except Exception as e:
                 self.log.warning(
-                    'Device fails to check if %s running with \"%s\"\n'
-                    'Exception %s', package_name, cmd, e)
+                    'Device fails to check if %s running with "%s"\n' "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
         self.log.debug("apk %s is not running", package_name)
         return None
 
@@ -795,17 +813,11 @@
         return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
 
     def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
-        low = acts_logger.logline_timestamp_comparator(log_begin_time,
-                                                       target) <= 0
-        high = acts_logger.logline_timestamp_comparator(log_end_time,
-                                                        target) >= 0
+        low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0
+        high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0
         return low and high
 
-    def cat_adb_log(self,
-                    tag,
-                    begin_time,
-                    end_time=None,
-                    dest_path="AdbLogExcerpts"):
+    def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"):
         """Takes an excerpt of the adb logcat log from a certain time point to
         current time.
 
@@ -821,21 +833,24 @@
         else:
             log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
         self.log.debug("Extracting adb log from logcat.")
-        logcat_path = os.path.join(self.device_log_path,
-                                   'adblog_%s_debug.txt' % self.serial)
+        logcat_path = os.path.join(
+            self.device_log_path, "adblog_%s_debug.txt" % self.serial
+        )
         if not os.path.exists(logcat_path):
             self.log.warning("Logcat file %s does not exist." % logcat_path)
             return
         adb_excerpt_dir = os.path.join(self.log_path, dest_path)
         os.makedirs(adb_excerpt_dir, exist_ok=True)
-        out_name = '%s,%s.txt' % (acts_logger.normalize_log_line_timestamp(
-            log_begin_time), self.serial)
+        out_name = "%s,%s.txt" % (
+            acts_logger.normalize_log_line_timestamp(log_begin_time),
+            self.serial,
+        )
         tag_len = utils.MAX_FILENAME_LEN - len(out_name)
-        out_name = '%s,%s' % (tag[:tag_len], out_name)
+        out_name = "%s,%s" % (tag[:tag_len], out_name)
         adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
-        with open(adb_excerpt_path, 'w', encoding='utf-8') as out:
+        with open(adb_excerpt_path, "w", encoding="utf-8") as out:
             in_file = logcat_path
-            with open(in_file, 'r', encoding='utf-8', errors='replace') as f:
+            with open(in_file, "r", encoding="utf-8", errors="replace") as f:
                 while True:
                     line = None
                     try:
@@ -844,21 +859,20 @@
                             break
                     except:
                         continue
-                    line_time = line[:acts_logger.log_line_timestamp_len]
+                    line_time = line[: acts_logger.log_line_timestamp_len]
                     if not acts_logger.is_valid_logline_timestamp(line_time):
                         continue
-                    if self._is_timestamp_in_range(line_time, log_begin_time,
-                                                   log_end_time):
-                        if not line.endswith('\n'):
-                            line += '\n'
+                    if self._is_timestamp_in_range(
+                        line_time, log_begin_time, log_end_time
+                    ):
+                        if not line.endswith("\n"):
+                            line += "\n"
                         out.write(line)
         return adb_excerpt_path
 
-    def search_logcat(self,
-                      matching_string,
-                      begin_time=None,
-                      end_time=None,
-                      logcat_path=None):
+    def search_logcat(
+        self, matching_string, begin_time=None, end_time=None, logcat_path=None
+    ):
         """Search logcat message with given string.
 
         Args:
@@ -887,29 +901,27 @@
               "message_id": "0853"}]
         """
         if not logcat_path:
-            logcat_path = os.path.join(self.device_log_path,
-                                       'adblog_%s_debug.txt' % self.serial)
+            logcat_path = os.path.join(
+                self.device_log_path, "adblog_%s_debug.txt" % self.serial
+            )
         if not os.path.exists(logcat_path):
             self.log.warning("Logcat file %s does not exist." % logcat_path)
             return
-        output = job.run("grep '%s' %s" % (matching_string, logcat_path),
-                         ignore_status=True)
+        output = job.run(
+            "grep '%s' %s" % (matching_string, logcat_path), ignore_status=True
+        )
         if not output.stdout or output.exit_status != 0:
             return []
         if begin_time:
             if not isinstance(begin_time, datetime):
-                log_begin_time = acts_logger.epoch_to_log_line_timestamp(
-                    begin_time)
-                begin_time = datetime.strptime(log_begin_time,
-                                               "%Y-%m-%d %H:%M:%S.%f")
+                log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+                begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f")
         if end_time:
             if not isinstance(end_time, datetime):
-                log_end_time = acts_logger.epoch_to_log_line_timestamp(
-                    end_time)
-                end_time = datetime.strptime(log_end_time,
-                                             "%Y-%m-%d %H:%M:%S.%f")
+                log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+                end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f")
         result = []
-        logs = re.findall(r'(\S+\s\S+)(.*)', output.stdout)
+        logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
         for log in logs:
             time_stamp = log[0]
             time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
@@ -920,18 +932,20 @@
             if end_time and time_obj > end_time:
                 continue
 
-            res = re.findall(r'.*\[(\d+)\]', log[1])
+            res = re.findall(r".*\[(\d+)\]", log[1])
             try:
                 message_id = res[0]
             except:
                 message_id = None
 
-            result.append({
-                "log_message": "".join(log),
-                "time_stamp": time_stamp,
-                "datetime_obj": time_obj,
-                "message_id": message_id
-            })
+            result.append(
+                {
+                    "log_message": "".join(log),
+                    "time_stamp": time_stamp,
+                    "datetime_obj": time_obj,
+                    "message_id": message_id,
+                }
+            )
         return result
 
     def start_adb_logcat(self):
@@ -940,29 +954,30 @@
         """
         if self.is_adb_logcat_on:
             self.log.warning(
-                'Android device %s already has a running adb logcat thread. ' %
-                self.serial)
+                "Android device %s already has a running adb logcat thread. "
+                % self.serial
+            )
             return
         # Disable adb log spam filter. Have to stop and clear settings first
         # because 'start' doesn't support --clear option before Android N.
         self.adb.shell("logpersist.stop --clear", ignore_status=True)
         self.adb.shell("logpersist.start", ignore_status=True)
-        if hasattr(self, 'adb_logcat_param'):
+        if hasattr(self, "adb_logcat_param"):
             extra_params = self.adb_logcat_param
         else:
             extra_params = "-b all"
 
         self.adb_logcat_process = logcat.create_logcat_keepalive_process(
-            self.serial, self.log_dir, extra_params)
+            self.serial, self.log_dir, extra_params
+        )
         self.adb_logcat_process.start()
 
     def stop_adb_logcat(self):
-        """Stops the adb logcat collection subprocess.
-        """
+        """Stops the adb logcat collection subprocess."""
         if not self.is_adb_logcat_on:
             self.log.warning(
-                'Android device %s does not have an ongoing adb logcat ' %
-                self.serial)
+                "Android device %s does not have an ongoing adb logcat " % self.serial
+            )
             return
         # Set the last timestamp to the current timestamp. This may cause
         # a race condition that allows the same line to be logged twice,
@@ -979,8 +994,9 @@
         Returns:
         Linux UID for the apk.
         """
-        output = self.adb.shell("dumpsys package %s | grep userId=" % apk_name,
-                                ignore_status=True)
+        output = self.adb.shell(
+            "dumpsys package %s | grep userId=" % apk_name, ignore_status=True
+        )
         result = re.search(r"userId=(\d+)", output)
         if result:
             return result.group(1)
@@ -997,15 +1013,17 @@
             Version of the given apk.
         """
         try:
-            output = self.adb.shell("dumpsys package %s | grep versionName" %
-                                    package_name)
+            output = self.adb.shell(
+                "dumpsys package %s | grep versionName" % package_name
+            )
             pattern = re.compile(r"versionName=(.+)", re.I)
             result = pattern.findall(output)
             if result:
                 return result[0]
         except Exception as e:
-            self.log.warning("Fail to get the version of package %s: %s",
-                             package_name, e)
+            self.log.warning(
+                "Fail to get the version of package %s: %s", package_name, e
+            )
         self.log.debug("apk %s is not found", package_name)
         return None
 
@@ -1022,13 +1040,16 @@
         try:
             return bool(
                 self.adb.shell(
-                    '(pm list packages | grep -w "package:%s") || true' %
-                    package_name))
+                    '(pm list packages | grep -w "package:%s") || true' % package_name
+                )
+            )
 
         except Exception as err:
             self.log.error(
-                'Could not determine if %s is installed. '
-                'Received error:\n%s', package_name, err)
+                "Could not determine if %s is installed. " "Received error:\n%s",
+                package_name,
+                err,
+            )
             return False
 
     def is_sl4a_installed(self):
@@ -1045,15 +1066,19 @@
         """
         for cmd in ("ps -A", "ps"):
             try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
+                out = self.adb.shell(
+                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                )
                 if package_name in out:
                     self.log.info("apk %s is running", package_name)
                     return True
             except Exception as e:
                 self.log.warning(
-                    "Device fails to check is %s running by %s "
-                    "Exception %s", package_name, cmd, e)
+                    "Device fails to check if %s is running by %s " "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
                 continue
         self.log.debug("apk %s is not running", package_name)
         return False
@@ -1071,8 +1096,7 @@
         True if package is installed. False otherwise.
         """
         try:
-            self.adb.shell('am force-stop %s' % package_name,
-                           ignore_status=True)
+            self.adb.shell("am force-stop %s" % package_name, ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", package_name, e)
 
@@ -1098,7 +1122,8 @@
         os.makedirs(br_path, exist_ok=True)
         epoch = begin_time if begin_time else utils.get_current_epoch_time()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
         out_name = "AndroidDevice%s_%s" % (self.serial, time_stamp)
         out_name = "%s.zip" % out_name if new_br else "%s.txt" % out_name
         full_out_path = os.path.join(br_path, out_name)
@@ -1112,25 +1137,24 @@
             out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
             if not out.startswith("OK"):
                 raise errors.AndroidDeviceError(
-                    'Failed to take bugreport on %s: %s' % (self.serial, out),
-                    serial=self.serial)
-            br_out_path = out.split(':')[1].strip().split()[0]
+                    "Failed to take bugreport on %s: %s" % (self.serial, out),
+                    serial=self.serial,
+                )
+            br_out_path = out.split(":")[1].strip().split()[0]
             self.adb.pull("%s %s" % (br_out_path, full_out_path))
         else:
-            self.adb.bugreport(" > {}".format(full_out_path),
-                               timeout=BUG_REPORT_TIMEOUT)
+            self.adb.bugreport(
+                " > {}".format(full_out_path), timeout=BUG_REPORT_TIMEOUT
+            )
         if test_name:
-            self.log.info("Bugreport for %s taken at %s.", test_name,
-                          full_out_path)
+            self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path)
         else:
             self.log.info("Bugreport taken at %s.", test_name, full_out_path)
         self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
 
-    def get_file_names(self,
-                       directory,
-                       begin_time=None,
-                       skip_files=[],
-                       match_string=None):
+    def get_file_names(
+        self, directory, begin_time=None, skip_files=[], match_string=None
+    ):
         """Get files names with provided directory."""
         cmd = "find %s -type f" % directory
         if begin_time:
@@ -1142,8 +1166,12 @@
         for skip_file in skip_files:
             cmd = "%s ! -iname %s" % (cmd, skip_file)
         out = self.adb.shell(cmd, ignore_status=True)
-        if not out or "No such" in out or "Permission denied" in out or \
-            "Not a directory" in out:
+        if (
+            not out
+            or "No such" in out
+            or "Permission denied" in out
+            or "Not a directory" in out
+        ):
             return []
         files = out.split("\n")
         self.log.debug("Find files in directory %s: %s", directory, files)
@@ -1154,7 +1182,7 @@
         """
         The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
         """
-        return self.adb.shell('echo $EXTERNAL_STORAGE')
+        return self.adb.shell("echo $EXTERNAL_STORAGE")
 
     def file_exists(self, file_path):
         """Returns whether a file exists on a device.
@@ -1162,14 +1190,16 @@
         Args:
             file_path: The path of the file to check for.
         """
-        cmd = '(test -f %s && echo yes) || echo no' % file_path
+        cmd = "(test -f %s && echo yes) || echo no" % file_path
         result = self.adb.shell(cmd)
-        if result == 'yes':
+        if result == "yes":
             return True
-        elif result == 'no':
+        elif result == "no":
             return False
-        raise ValueError('Couldn\'t determine if %s exists. '
-                         'Expected yes/no, got %s' % (file_path, result[cmd]))
+        raise ValueError(
+            "Couldn't determine if %s exists. "
+            "Expected yes/no, got %s" % (file_path, result)
+        )
 
     def pull_files(self, device_paths, host_path=None):
         """Pull files from devices.
@@ -1183,39 +1213,37 @@
         if not host_path:
             host_path = self.log_path
         for device_path in device_paths:
-            self.log.info('Pull from device: %s -> %s' %
-                          (device_path, host_path))
-            self.adb.pull("%s %s" % (device_path, host_path),
-                          timeout=PULL_TIMEOUT)
+            self.log.info("Pull from device: %s -> %s" % (device_path, host_path))
+            self.adb.pull("%s %s" % (device_path, host_path), timeout=PULL_TIMEOUT)
 
-    def check_crash_report(self,
-                           test_name=None,
-                           begin_time=None,
-                           log_crash_report=False):
+    def check_crash_report(
+        self, test_name=None, begin_time=None, log_crash_report=False
+    ):
         """check crash report on the device."""
         crash_reports = []
         for crash_path in CRASH_REPORT_PATHS:
             try:
-                cmd = 'cd %s' % crash_path
+                cmd = "cd %s" % crash_path
                 self.adb.shell(cmd)
             except Exception as e:
                 self.log.debug("received exception %s", e)
                 continue
-            crashes = self.get_file_names(crash_path,
-                                          skip_files=CRASH_REPORT_SKIPS,
-                                          begin_time=begin_time)
+            crashes = self.get_file_names(
+                crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time
+            )
             if crash_path == "/data/tombstones/" and crashes:
                 tombstones = crashes[:]
                 for tombstone in tombstones:
                     if self.adb.shell(
-                            'cat %s | grep "crash_dump failed to dump process"'
-                            % tombstone):
+                        'cat %s | grep "crash_dump failed to dump process"' % tombstone
+                    ):
                         crashes.remove(tombstone)
             if crashes:
                 crash_reports.extend(crashes)
         if crash_reports and log_crash_report:
-            crash_log_path = os.path.join(self.device_log_path,
-                                          "Crashes_%s" % self.serial)
+            crash_log_path = os.path.join(
+                self.device_log_path, "Crashes_%s" % self.serial
+            )
             os.makedirs(crash_log_path, exist_ok=True)
             self.pull_files(crash_reports, crash_log_path)
         return crash_reports
@@ -1225,35 +1253,38 @@
         # Sleep 10 seconds for the buffered log to be written in qxdm log file
         time.sleep(10)
         log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
-        qxdm_logs = self.get_file_names(log_path,
-                                        begin_time=begin_time,
-                                        match_string="*.qmdl")
+        qxdm_logs = self.get_file_names(
+            log_path, begin_time=begin_time, match_string="*.qmdl"
+        )
         if qxdm_logs:
-            qxdm_log_path = os.path.join(self.device_log_path,
-                                         "QXDM_%s" % self.serial)
+            qxdm_log_path = os.path.join(self.device_log_path, "QXDM_%s" % self.serial)
             os.makedirs(qxdm_log_path, exist_ok=True)
 
             self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
             self.pull_files(qxdm_logs, qxdm_log_path)
 
-            self.adb.pull("/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
-                          timeout=PULL_TIMEOUT,
-                          ignore_status=True)
+            self.adb.pull(
+                "/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
             # Zip Folder
-            utils.zip_directory('%s.zip' % qxdm_log_path, qxdm_log_path)
+            utils.zip_directory("%s.zip" % qxdm_log_path, qxdm_log_path)
             shutil.rmtree(qxdm_log_path)
         else:
             self.log.error("Didn't find QXDM logs in %s." % log_path)
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
+            omadm_log_path = os.path.join(
+                self.device_log_path, "OMADM_%s" % self.serial
+            )
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
+                "/data/data/com.android.omadm.service/files/dm/log/ %s"
+                % omadm_log_path,
                 timeout=PULL_TIMEOUT,
-                ignore_status=True)
+                ignore_status=True,
+            )
 
     def get_sdm_logs(self, test_name="", begin_time=None):
         """Get sdm logs."""
@@ -1261,31 +1292,32 @@
         time.sleep(10)
         log_paths = [
             ALWAYS_ON_LOG_PATH,
-            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH)
+            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH),
         ]
         sdm_logs = []
         for path in log_paths:
-            sdm_logs += self.get_file_names(path,
-                                            begin_time=begin_time,
-                                            match_string="*.sdm*")
+            sdm_logs += self.get_file_names(
+                path, begin_time=begin_time, match_string="*.sdm*"
+            )
         if sdm_logs:
-            sdm_log_path = os.path.join(self.device_log_path,
-                                        "SDM_%s" % self.serial)
+            sdm_log_path = os.path.join(self.device_log_path, "SDM_%s" % self.serial)
             os.makedirs(sdm_log_path, exist_ok=True)
             self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
             self.pull_files(sdm_logs, sdm_log_path)
         else:
             self.log.error("Didn't find SDM logs in %s." % log_paths)
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
+            omadm_log_path = os.path.join(
+                self.device_log_path, "OMADM_%s" % self.serial
+            )
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
+                "/data/data/com.android.omadm.service/files/dm/log/ %s"
+                % omadm_log_path,
                 timeout=PULL_TIMEOUT,
-                ignore_status=True)
+                ignore_status=True,
+            )
 
     def start_new_session(self, max_connections=None, server_port=None):
         """Start a new session in sl4a.
@@ -1301,7 +1333,8 @@
             existing uid to a new session.
         """
         session = self._sl4a_manager.create_session(
-            max_connections=max_connections, server_port=server_port)
+            max_connections=max_connections, server_port=server_port
+        )
 
         self._sl4a_manager.sessions[session.uid] = session
         return session.rpc_client
@@ -1313,11 +1346,9 @@
         """
         self._sl4a_manager.terminate_all_sessions()
 
-    def run_iperf_client_nb(self,
-                            server_host,
-                            extra_args="",
-                            timeout=IPERF_TIMEOUT,
-                            log_file_path=None):
+    def run_iperf_client_nb(
+        self, server_host, extra_args="", timeout=IPERF_TIMEOUT, log_file_path=None
+    ):
         """Start iperf client on the device asynchronously.
 
         Return status as true if iperf client start successfully.
@@ -1335,10 +1366,7 @@
             cmd += " --logfile {} &".format(log_file_path)
         self.adb.shell_nb(cmd)
 
-    def run_iperf_client(self,
-                         server_host,
-                         extra_args="",
-                         timeout=IPERF_TIMEOUT):
+    def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT):
         """Start iperf client on the device.
 
         Return status as true if iperf client start successfully.
@@ -1353,9 +1381,10 @@
             status: true if iperf client start successfully.
             results: results have data flow information
         """
-        out = self.adb.shell("iperf3 -c {} {}".format(server_host, extra_args),
-                             timeout=timeout)
-        clean_out = out.split('\n')
+        out = self.adb.shell(
+            "iperf3 -c {} {}".format(server_host, extra_args), timeout=timeout
+        )
+        clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
         return True, clean_out
@@ -1373,7 +1402,7 @@
             results: results have output of command
         """
         out = self.adb.shell("iperf3 -s {}".format(extra_args))
-        clean_out = out.split('\n')
+        clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
         return True, clean_out
@@ -1393,7 +1422,7 @@
         while time.time() < timeout_start + timeout:
             try:
                 completed = self.adb.getprop("sys.boot_completed")
-                if completed == '1':
+                if completed == "1":
                     self.log.debug("Device has rebooted")
                     return
             except AdbError:
@@ -1402,13 +1431,12 @@
                 pass
             time.sleep(5)
         raise errors.AndroidDeviceError(
-            'Device %s booting process timed out.' % self.serial,
-            serial=self.serial)
+            "Device %s booting process timed out." % self.serial, serial=self.serial
+        )
 
-    def reboot(self,
-               stop_at_lock_screen=False,
-               timeout=180,
-               wait_after_reboot_complete=1):
+    def reboot(
+        self, stop_at_lock_screen=False, timeout=180, wait_after_reboot_complete=1
+    ):
         """Reboots the device.
 
         Terminate all sl4a sessions, reboot the device, wait for device to
@@ -1438,16 +1466,15 @@
         while time.time() < timeout_start + timeout:
             try:
                 self.adb.get_state()
-                time.sleep(.1)
+                time.sleep(0.1)
             except AdbError:
                 # get_state will raise an error if the device is not found. We
                 # want the device to be missing to prove the device has kicked
                 # off the reboot.
                 break
-        self.wait_for_boot_completion(timeout=(timeout - time.time() +
-                                               timeout_start))
+        self.wait_for_boot_completion(timeout=(timeout - time.time() + timeout_start))
 
-        self.log.debug('Wait for a while after boot completion.')
+        self.log.debug("Wait for a while after boot completion.")
         time.sleep(wait_after_reboot_complete)
         self.root_adb()
         skip_sl4a = self.skip_sl4a
@@ -1473,19 +1500,18 @@
 
         self.start_services()
 
-    def get_ipv4_address(self, interface='wlan0', timeout=5):
+    def get_ipv4_address(self, interface="wlan0", timeout=5):
         for timer in range(0, timeout):
             try:
-                ip_string = self.adb.shell('ifconfig %s|grep inet' % interface)
+                ip_string = self.adb.shell("ifconfig %s|grep inet" % interface)
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning('Unable to find IP address for %s.' %
-                                     interface)
+                    self.log.warning("Unable to find IP address for %s." % interface)
                     return None
                 else:
                     time.sleep(1)
-        result = re.search('addr:(.*) Bcast', ip_string)
+        result = re.search("addr:(.*) Bcast", ip_string)
         if result != None:
             ip_address = result.group(1)
             try:
@@ -1499,16 +1525,15 @@
     def get_ipv4_gateway(self, timeout=5):
         for timer in range(0, timeout):
             try:
-                gateway_string = self.adb.shell(
-                    'dumpsys wifi | grep mDhcpResults')
+                gateway_string = self.adb.shell("dumpsys wifi | grep mDhcpResults")
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning('Unable to find gateway')
+                    self.log.warning("Unable to find gateway")
                     return None
                 else:
                     time.sleep(1)
-        result = re.search('Gateway (.*) DNS servers', gateway_string)
+        result = re.search("Gateway (.*) DNS servers", gateway_string)
         if result != None:
             ipv4_gateway = result.group(1)
             try:
@@ -1525,28 +1550,33 @@
     def get_my_current_focus_window(self):
         """Get the current focus window on screen"""
         output = self.adb.shell(
-            'dumpsys window displays | grep -E mCurrentFocus | grep -v null',
-            ignore_status=True)
+            "dumpsys window displays | grep -E mCurrentFocus | grep -v null",
+            ignore_status=True,
+        )
         if not output or "not found" in output or "Can't find" in output:
-            result = ''
+            result = ""
         else:
-            result = output.split(' ')[-1].strip("}")
+            result = output.split(" ")[-1].strip("}")
         self.log.debug("Current focus window is %s", result)
         return result
 
     def get_my_current_focus_app(self):
         """Get the current focus application"""
         dumpsys_cmd = [
-            'dumpsys window | grep -E mFocusedApp',
-            'dumpsys window displays | grep -E mFocusedApp'
+            "dumpsys window | grep -E mFocusedApp",
+            "dumpsys window displays | grep -E mFocusedApp",
         ]
         for cmd in dumpsys_cmd:
             output = self.adb.shell(cmd, ignore_status=True)
-            if not output or "not found" in output or "Can't find" in output or (
-                    "mFocusedApp=null" in output):
-                result = ''
+            if (
+                not output
+                or "not found" in output
+                or "Can't find" in output
+                or ("mFocusedApp=null" in output)
+            ):
+                result = ""
             else:
-                result = output.split(' ')[-2]
+                result = output.split(" ")[-2]
                 break
         self.log.debug("Current focus app is %s", result)
         return result
@@ -1557,18 +1587,16 @@
             return window_name in current_window
         return current_window and ENCRYPTION_WINDOW not in current_window
 
-    def wait_for_window_ready(self,
-                              window_name=None,
-                              check_interval=5,
-                              check_duration=60):
+    def wait_for_window_ready(
+        self, window_name=None, check_interval=5, check_duration=60
+    ):
         elapsed_time = 0
         while elapsed_time < check_duration:
             if self.is_window_ready(window_name=window_name):
                 return True
             time.sleep(check_interval)
             elapsed_time += check_interval
-        self.log.info("Current focus window is %s",
-                      self.get_my_current_focus_window())
+        self.log.info("Current focus window is %s", self.get_my_current_focus_window())
         return False
 
     def is_user_setup_complete(self):
@@ -1592,7 +1620,7 @@
 
     def is_screen_lock_enabled(self):
         """Check if screen lock is enabled"""
-        cmd = ("dumpsys window policy | grep showing=")
+        cmd = "dumpsys window policy | grep showing="
         out = self.adb.shell(cmd, ignore_status=True)
         return "true" in out
 
@@ -1604,7 +1632,8 @@
             self.log.info("Device is in CrpytKeeper window")
             return True
         if "StatusBar" in current_window and (
-            (not current_app) or "FallbackHome" in current_app):
+            (not current_app) or "FallbackHome" in current_app
+        ):
             self.log.info("Device is locked")
             return True
         return False
@@ -1618,8 +1647,10 @@
                 if self.is_waiting_for_unlock_pin():
                     self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
                     time.sleep(1)
-                if not self.is_waiting_for_unlock_pin(
-                ) and self.wait_for_window_ready():
+                if (
+                    not self.is_waiting_for_unlock_pin()
+                    and self.wait_for_window_ready()
+                ):
                     return True
             return False
         else:
@@ -1671,16 +1702,19 @@
 
     def exit_setup_wizard(self):
         # Handling Android TV's setupwizard is ignored for now.
-        if 'feature:android.hardware.type.television' in self.adb.shell(
-                'pm list features'):
+        if "feature:android.hardware.type.television" in self.adb.shell(
+            "pm list features"
+        ):
             return
         if not self.is_user_setup_complete() or self.is_setupwizard_on():
             # b/116709539 need this to prevent reboot after skip setup wizard
-            self.adb.shell("am start -a com.android.setupwizard.EXIT",
-                           ignore_status=True)
-            self.adb.shell("pm disable %s" %
-                           self.get_setupwizard_package_name(),
-                           ignore_status=True)
+            self.adb.shell(
+                "am start -a com.android.setupwizard.EXIT", ignore_status=True
+            )
+            self.adb.shell(
+                "pm disable %s" % self.get_setupwizard_package_name(),
+                ignore_status=True,
+            )
         # Wait up to 5 seconds for user_setup_complete to be updated
         end_time = time.time() + 5
         while time.time() < end_time:
@@ -1705,9 +1739,11 @@
         android_package_name = "com.google.android"
         package = self.adb.shell(
             "pm list packages -f | grep -E {} | grep {}".format(
-                packages_to_skip, android_package_name))
-        wizard_package = package.split('=')[1]
-        activity = package.split('=')[0].split('/')[-2]
+                packages_to_skip, android_package_name
+            )
+        )
+        wizard_package = package.split("=")[1]
+        activity = package.split("=")[0].split("/")[-2]
         self.log.info("%s/.%sActivity" % (wizard_package, activity))
         return "%s/.%sActivity" % (wizard_package, activity)
 
@@ -1728,16 +1764,25 @@
         try:
             self.ensure_verity_disabled()
             self.adb.remount()
-            out = self.adb.push('%s %s' % (src_file_path, dst_file_path),
-                                timeout=push_timeout)
-            if 'error' in out:
-                self.log.error('Unable to push system file %s to %s due to %s',
-                               src_file_path, dst_file_path, out)
+            out = self.adb.push(
+                "%s %s" % (src_file_path, dst_file_path), timeout=push_timeout
+            )
+            if "error" in out:
+                self.log.error(
+                    "Unable to push system file %s to %s due to %s",
+                    src_file_path,
+                    dst_file_path,
+                    out,
+                )
                 return False
             return True
         except Exception as e:
-            self.log.error('Unable to push system file %s to %s due to %s',
-                           src_file_path, dst_file_path, e)
+            self.log.error(
+                "Unable to push system file %s to %s due to %s",
+                src_file_path,
+                dst_file_path,
+                e,
+            )
             return False
 
     def ensure_verity_enabled(self):
@@ -1748,8 +1793,8 @@
         """
         user = self.adb.get_user_id()
         # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
         if not system_verity or not vendor_verity:
             self.adb.ensure_root()
             self.adb.enable_verity()
@@ -1763,8 +1808,8 @@
         """
         user = self.adb.get_user_id()
         # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
         if system_verity or vendor_verity:
             self.adb.ensure_root()
             self.adb.disable_verity()
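
A minimal usage sketch of a few of the AndroidDevice helpers reformatted above, assuming `ad` is an already-registered AndroidDevice controller; the matching string and device path are placeholders, not part of this change:

def collect_wifi_diagnostics(ad):
    # search_logcat() returns a list of dicts with "log_message", "time_stamp",
    # "datetime_obj" and "message_id" keys, or nothing when no logcat file exists.
    for entry in ad.search_logcat("wpa_supplicant") or []:
        ad.log.debug("matched: %s", entry["log_message"])

    # check_crash_report() returns the crash files found and, with
    # log_crash_report=True, also pulls them under the device log path.
    crashes = ad.check_crash_report(log_crash_report=True)
    ad.log.info("found %d crash reports", len(crashes))

    # pull_files() defaults to the device's host-side log_path when no
    # host_path is given.
    if ad.file_exists("/sdcard/diag.txt"):  # placeholder device path
        ad.pull_files(["/sdcard/diag.txt"])
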
diff --git a/src/antlion/controllers/android_lib/android_api.py b/src/antlion/controllers/android_lib/android_api.py
deleted file mode 100644
index d58fe46..0000000
--- a/src/antlion/controllers/android_lib/android_api.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import sys
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.libs import version_selector
-
-
-class AndroidApi:
-    OLDEST = 0
-    MINIMUM = 0
-    L = 21
-    L_MR1 = 22
-    M = 23
-    N = 24
-    N_MR1 = 25
-    O = 26
-    O_MR1 = 27
-    P = 28
-    LATEST = sys.maxsize
-    MAX = sys.maxsize
-
-
-def android_api(min_api=AndroidApi.OLDEST, max_api=AndroidApi.LATEST):
-    """Decorates a function to only be called for the given API range.
-
-    Only gets called if the AndroidDevice in the args is within the specified
-    API range. Otherwise, a different function may be called instead. If the
-    API level is out of range, and no other function handles that API level, an
-    error is raise instead.
-
-    Note: In Python3.5 and below, the order of kwargs is not preserved. If your
-          function contains multiple AndroidDevices within the kwargs, and no
-          AndroidDevices within args, you are NOT guaranteed the first
-          AndroidDevice is the same one chosen each time the function runs. Due
-          to this, we do not check for AndroidDevices in kwargs.
-
-    Args:
-         min_api: The minimum API level. Can be an int or an AndroidApi value.
-         max_api: The maximum API level. Can be an int or an AndroidApi value.
-    """
-
-    def get_api_level(*args, **_):
-        for arg in args:
-            if isinstance(arg, AndroidDevice):
-                return arg.sdk_api_level()
-        logging.getLogger().error(
-            'An AndroidDevice was not found in the given '
-            'arguments.')
-        return None
-
-    return version_selector.set_version(get_api_level, min_api, max_api)
diff --git a/src/antlion/controllers/android_lib/logcat.py b/src/antlion/controllers/android_lib/logcat.py
index 4e2c4fd..0a5e8f7 100644
--- a/src/antlion/controllers/android_lib/logcat.py
+++ b/src/antlion/controllers/android_lib/logcat.py
@@ -21,7 +21,7 @@
 from antlion.libs.logging import log_stream
 from antlion.libs.logging.log_stream import LogStyles
 
-TIMESTAMP_REGEX = r'((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)'
+TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
 
 
 class TimestampTracker(object):
@@ -43,17 +43,17 @@
 
 def _get_log_level(message):
     """Returns the log level for the given message."""
-    if message.startswith('-') or len(message) < 37:
+    if message.startswith("-") or len(message) < 37:
         return logging.ERROR
     else:
         log_level = message[36]
-        if log_level in ('V', 'D'):
+        if log_level in ("V", "D"):
             return logging.DEBUG
-        elif log_level == 'I':
+        elif log_level == "I":
             return logging.INFO
-        elif log_level == 'W':
+        elif log_level == "W":
             return logging.WARNING
-        elif log_level == 'E':
+        elif log_level == "E":
             return logging.ERROR
     return logging.NOTSET
 
@@ -71,15 +71,18 @@
 def _on_retry(serial, extra_params, timestamp_tracker):
     def on_retry(_):
         begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
-        additional_params = extra_params or ''
+        additional_params = extra_params or ""
 
-        return 'adb -s %s logcat -T %s -v year %s' % (
-            serial, begin_at, additional_params)
+        return "adb -s %s logcat -T %s -v year %s" % (
+            serial,
+            begin_at,
+            additional_params,
+        )
 
     return on_retry
 
 
-def create_logcat_keepalive_process(serial, logcat_dir, extra_params=''):
+def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""):
     """Creates a Logcat Process that automatically attempts to reconnect.
 
     Args:
@@ -91,12 +94,15 @@
         A acts.libs.proc.process.Process object.
     """
     logger = log_stream.create_logger(
-        'adblog_%s' % serial, log_name=serial, subcontext=logcat_dir,
-        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG))
-    process = Process('adb -s %s logcat -T 1 -v year %s' %
-                      (serial, extra_params))
+        "adblog_%s" % serial,
+        log_name=serial,
+        subcontext=logcat_dir,
+        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
+    )
+    process = Process("adb -s %s logcat -T 1 -v year %s" % (serial, extra_params))
     timestamp_tracker = TimestampTracker()
     process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
     process.set_on_terminate_callback(
-        _on_retry(serial, extra_params, timestamp_tracker))
+        _on_retry(serial, extra_params, timestamp_tracker)
+    )
     return process
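
A short sketch of how create_logcat_keepalive_process is typically driven, mirroring AndroidDevice.start_adb_logcat above; the serial and log directory below are placeholders, not part of this change:

from antlion.controllers.android_lib import logcat

process = logcat.create_logcat_keepalive_process(
    "1A2B3C4D",             # placeholder device serial
    "/tmp/antlion/logs",    # placeholder log directory
    extra_params="-b all",  # same default AndroidDevice uses when none is configured
)
# start() launches the adb logcat subprocess; when it terminates, the on-retry
# callback rebuilds the command from the last seen timestamp and reconnects.
process.start()
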
diff --git a/src/antlion/controllers/android_lib/services.py b/src/antlion/controllers/android_lib/services.py
index 6c5f334..098f524 100644
--- a/src/antlion/controllers/android_lib/services.py
+++ b/src/antlion/controllers/android_lib/services.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from antlion.controllers.android_lib import errors
 from antlion.controllers.android_lib import events as android_events
 from antlion.event import event_bus
@@ -44,14 +43,22 @@
         """Registers the _start and _stop methods to their corresponding
         events.
         """
+
         def check_serial(event):
             return self.serial == event.ad.serial
 
         self._registration_ids = [
-            event_bus.register(android_events.AndroidStartServicesEvent,
-                               self._start, filter_fn=check_serial),
-            event_bus.register(android_events.AndroidStopServicesEvent,
-                               self._stop, filter_fn=check_serial)]
+            event_bus.register(
+                android_events.AndroidStartServicesEvent,
+                self._start,
+                filter_fn=check_serial,
+            ),
+            event_bus.register(
+                android_events.AndroidStopServicesEvent,
+                self._stop,
+                filter_fn=check_serial,
+            ),
+        ]
 
     def unregister(self):
         """Unregisters all subscriptions in this service."""
@@ -93,14 +100,15 @@
             return
 
         if not self.ad.is_sl4a_installed():
-            self.ad.log.error('sl4a.apk is not installed')
+            self.ad.log.error("sl4a.apk is not installed")
             raise errors.AndroidDeviceError(
-                'The required sl4a.apk is not installed',
-                serial=self.serial)
+                "The required sl4a.apk is not installed", serial=self.serial
+            )
         if not self.ad.ensure_screen_on():
             self.ad.log.error("User window cannot come up")
             raise errors.AndroidDeviceError(
-                "User window cannot come up", serial=self.serial)
+                "User window cannot come up", serial=self.serial
+            )
 
         droid, ed = self.ad.get_droid()
         ed.start()
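
A minimal sketch of the event_bus registration pattern used by the services above: a handler subscribed to AndroidStartServicesEvent and filtered to a single device serial. The handler body and serial are placeholders:

from antlion.controllers.android_lib import events as android_events
from antlion.event import event_bus


def _on_start(event):
    # The event carries the AndroidDevice as event.ad, as used by
    # check_serial in the register() calls above.
    print("services starting on", event.ad.serial)


# register() returns a registration handle; the services above collect these
# handles so the subscriptions can be removed again in unregister().
registration_id = event_bus.register(
    android_events.AndroidStartServicesEvent,
    _on_start,
    filter_fn=lambda event: event.ad.serial == "1A2B3C4D",  # placeholder serial
)
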
diff --git a/src/antlion/controllers/android_lib/tel/tel_utils.py b/src/antlion/controllers/android_lib/tel/tel_utils.py
index 0be8ef5..c18741c 100644
--- a/src/antlion/controllers/android_lib/tel/tel_utils.py
+++ b/src/antlion/controllers/android_lib/tel/tel_utils.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """Generic telephony utility functions. Cloned from test_utils.tel."""
 
 import re
@@ -89,21 +88,27 @@
 
 
 def dumpsys_last_call_info(ad):
-    """ Get call information by dumpsys telecom. """
+    """Get call information by dumpsys telecom."""
     num = dumpsys_last_call_number(ad)
     output = ad.adb.shell("dumpsys telecom")
     result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL)
     call_info = {"TC": num}
     if result:
         result = result.group(1)
-        for attr in ("startTime", "endTime", "direction", "isInterrupted",
-                     "callTechnologies", "callTerminationsReason",
-                     "isVideoCall", "callProperties"):
+        for attr in (
+            "startTime",
+            "endTime",
+            "direction",
+            "isInterrupted",
+            "callTechnologies",
+            "callTerminationsReason",
+            "isVideoCall",
+            "callProperties",
+        ):
             match = re.search(r"%s: (.*)" % attr, result)
             if match:
                 if attr in ("startTime", "endTime"):
-                    call_info[attr] = epoch_to_log_line_timestamp(
-                        int(match.group(1)))
+                    call_info[attr] = epoch_to_log_line_timestamp(int(match.group(1)))
                 else:
                     call_info[attr] = match.group(1)
     ad.log.debug("call_info = %s", call_info)
@@ -124,8 +129,7 @@
 
 
 def get_outgoing_voice_sub_id(ad):
-    """ Get outgoing voice subscription id
-    """
+    """Get outgoing voice subscription id"""
     if hasattr(ad, "outgoing_voice_sub_id"):
         return ad.outgoing_voice_sub_id
     else:
@@ -133,7 +137,7 @@
 
 
 def get_rx_tx_power_levels(log, ad):
-    """ Obtains Rx and Tx power levels from the MDS application.
+    """Obtains Rx and Tx power levels from the MDS application.
 
     The method requires the MDS app to be installed in the DUT.
 
@@ -146,36 +150,40 @@
         in Rx chain, and the second element is the transmitted power in dBm.
         Values for invalid Rx / Tx chains are set to None.
     """
-    cmd = ('am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
-           'response wait "com.google.mdstest/com.google.mdstest.instrument.'
-           'ModemCommandInstrumentation"')
+    cmd = (
+        'am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
+        'response wait "com.google.mdstest/com.google.mdstest.instrument.'
+        'ModemCommandInstrumentation"'
+    )
     try:
         output = ad.adb.shell(cmd)
     except AdbCommandError as e:
         log.error(e)
         output = None
 
-    if not output or 'result=SUCCESS' not in output:
-        raise RuntimeError('Could not obtain Tx/Rx power levels from MDS. Is '
-                           'the MDS app installed?')
+    if not output or "result=SUCCESS" not in output:
+        raise RuntimeError(
+            "Could not obtain Tx/Rx power levels from MDS. Is " "the MDS app installed?"
+        )
 
     response = re.search(r"(?<=response=).+", output)
 
     if not response:
-        raise RuntimeError('Invalid response from the MDS app:\n' + output)
+        raise RuntimeError("Invalid response from the MDS app:\n" + output)
 
     # Obtain a list of bytes in hex format from the response string
-    response_hex = response.group(0).split(' ')
+    response_hex = response.group(0).split(" ")
 
     def get_bool(pos):
-        """ Obtain a boolean variable from the byte array. """
-        return response_hex[pos] == '01'
+        """Obtain a boolean variable from the byte array."""
+        return response_hex[pos] == "01"
 
     def get_int32(pos):
-        """ Obtain an int from the byte array. Bytes are printed in
+        """Obtain an int from the byte array. Bytes are printed in
         little endian format."""
         return struct.unpack(
-            '<i', bytearray.fromhex(''.join(response_hex[pos:pos + 4])))[0]
+            "<i", bytearray.fromhex("".join(response_hex[pos : pos + 4]))
+        )[0]
 
     rx_power = []
     RX_CHAINS = 4
@@ -204,7 +212,7 @@
 
 
 def get_telephony_signal_strength(ad):
-    #{'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
+    # {'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
     # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160,
     # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0,
     # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0,
@@ -219,12 +227,14 @@
     return signal_strength
 
 
-def initiate_call(log,
-                  ad,
-                  callee_number,
-                  emergency=False,
-                  incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
-                  video=False):
+def initiate_call(
+    log,
+    ad,
+    callee_number,
+    emergency=False,
+    incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
+    video=False,
+):
     """Make phone call from caller to callee.
 
     Args:
@@ -253,7 +263,8 @@
 
         # Verify OFFHOOK state
         if not wait_for_call_offhook_for_subscription(
-                log, ad, sub_id, event_tracking_started=True):
+            log, ad, sub_id, event_tracking_started=True
+        ):
             ad.log.info("sub_id %s not in call offhook state", sub_id)
             last_call_drop_reason(ad, begin_time=begin_time)
             return False
@@ -299,7 +310,7 @@
         False otherwise.
     """
     try:
-        value_in_event = event['data'][field]
+        value_in_event = event["data"][field]
     except KeyError:
         return False
     for value in value_list:
@@ -319,12 +330,14 @@
         return ad.droid.telecomIsInCall()
     except:
         return "mCallState=2" in ad.adb.shell(
-            "dumpsys telephony.registry | grep mCallState")
+            "dumpsys telephony.registry | grep mCallState"
+        )
 
 
 def last_call_drop_reason(ad, begin_time=None):
     reasons = ad.search_logcat(
-        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time)
+        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time
+    )
     reason_string = ""
     if reasons:
         log_msg = "Logcat call drop reasons:"
@@ -333,17 +346,15 @@
             if "ril reason str" in reason["log_message"]:
                 reason_string = reason["log_message"].split(":")[-1].strip()
         ad.log.info(log_msg)
-    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION",
-                               begin_time)
+    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION", begin_time)
     if reasons:
         ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen")
-    ad.log.info("last call dumpsys: %s",
-                sorted(dumpsys_last_call_info(ad).items()))
+    ad.log.info("last call dumpsys: %s", sorted(dumpsys_last_call_info(ad).items()))
     return reason_string
 
 
 def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -359,11 +370,12 @@
         return toggle_airplane_mode_by_adb(log, ad, new_state)
     else:
         return toggle_airplane_mode_msim(
-            log, ad, new_state, strict_checking=strict_checking)
+            log, ad, new_state, strict_checking=strict_checking
+        )
 
 
 def toggle_airplane_mode_by_adb(log, ad, new_state=None):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -392,7 +404,7 @@
 
 
 def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
+    """Toggle the state of airplane mode.
 
     Args:
         log: log handler.
@@ -411,13 +423,12 @@
         return True
     elif new_state is None:
         new_state = not cur_state
-        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state,
-                    new_state)
+        ad.log.info("Toggle APM mode, from current state %s to %s", cur_state, new_state)
     sub_id_list = []
     active_sub_info = ad.droid.subscriptionGetAllSubInfoList()
     if active_sub_info:
         for info in active_sub_info:
-            sub_id_list.append(info['subscriptionId'])
+            sub_id_list.append(info["subscriptionId"])
 
     ad.ed.clear_all_events()
     time.sleep(0.1)
@@ -436,8 +447,7 @@
         ad.log.info("Turn off airplane mode")
 
     for sub_id in sub_id_list:
-        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(
-            sub_id)
+        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(sub_id)
 
     timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT
     ad.droid.connectivityToggleAirplaneMode(new_state)
@@ -449,35 +459,39 @@
                 is_event_match_for_list,
                 timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT,
                 field=ServiceStateContainer.SERVICE_STATE,
-                value_list=service_state_list)
+                value_list=service_state_list,
+            )
             ad.log.info("Got event %s", event)
         except Empty:
-            ad.log.warning("Did not get expected service state change to %s",
-                           service_state_list)
+            ad.log.warning(
+                "Did not get expected service state change to %s", service_state_list
+            )
         finally:
             for sub_id in sub_id_list:
-                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(
-                    sub_id)
+                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(sub_id)
     except Exception as e:
         ad.log.error(e)
 
     # APM on (new_state=True) will turn off bluetooth but may not turn it on
     try:
         if new_state and not _wait_for_bluetooth_in_state(
-                log, ad, False, timeout_time - time.time()):
-            ad.log.error(
-                "Failed waiting for bluetooth during airplane mode toggle")
-            if strict_checking: return False
+            log, ad, False, timeout_time - time.time()
+        ):
+            ad.log.error("Failed waiting for bluetooth during airplane mode toggle")
+            if strict_checking:
+                return False
     except Exception as e:
         ad.log.error("Failed to check bluetooth state due to %s", e)
         if strict_checking:
             raise
 
     # APM on (new_state=True) will turn off wifi but may not turn it on
-    if new_state and not _wait_for_wifi_in_state(log, ad, False,
-                                                 timeout_time - time.time()):
+    if new_state and not _wait_for_wifi_in_state(
+        log, ad, False, timeout_time - time.time()
+    ):
         ad.log.error("Failed waiting for wifi during airplane mode toggle on")
-        if strict_checking: return False
+        if strict_checking:
+            return False
 
     if ad.droid.connectivityCheckAirplaneMode() != new_state:
         ad.log.error("Set airplane mode to %s failed", new_state)
@@ -516,11 +530,12 @@
 
 
 def wait_for_call_offhook_event(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT):
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+):
     """Wait for an incoming call on specified subscription.
 
     Args:
@@ -542,25 +557,26 @@
             is_event_match,
             timeout=timeout,
             field=CallStateContainer.CALL_STATE,
-            value=TELEPHONY_STATE_OFFHOOK)
+            value=TELEPHONY_STATE_OFFHOOK,
+        )
         ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK)
     except Empty:
         ad.log.info("No event for call state change to OFFHOOK")
         return False
     finally:
         if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
     return True
 
 
 def wait_for_call_offhook_for_subscription(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
-        interval=WAIT_TIME_BETWEEN_STATE_CHECK):
+    log,
+    ad,
+    sub_id,
+    event_tracking_started=False,
+    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
+    interval=WAIT_TIME_BETWEEN_STATE_CHECK,
+):
     """Wait for an incoming call on specified subscription.
 
     Args:
@@ -582,32 +598,32 @@
     try:
         while time.time() < end_time:
             if not offhook_event_received:
-                if wait_for_call_offhook_event(log, ad, sub_id, True,
-                                               interval):
+                if wait_for_call_offhook_event(log, ad, sub_id, True, interval):
                     offhook_event_received = True
-            telephony_state = ad.droid.telephonyGetCallStateForSubscription(
-                sub_id)
+            telephony_state = ad.droid.telephonyGetCallStateForSubscription(sub_id)
             telecom_state = ad.droid.telecomGetCallState()
             if telephony_state == TELEPHONY_STATE_OFFHOOK and (
-                    telecom_state == TELEPHONY_STATE_OFFHOOK):
+                telecom_state == TELEPHONY_STATE_OFFHOOK
+            ):
                 ad.log.info("telephony and telecom are in OFFHOOK state")
                 return True
             else:
                 ad.log.info(
                     "telephony in %s, telecom in %s, expecting OFFHOOK state",
-                    telephony_state, telecom_state)
+                    telephony_state,
+                    telecom_state,
+                )
             if offhook_event_received:
                 time.sleep(interval)
     finally:
         if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
+            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
 
 
 def _wait_for_bluetooth_in_state(log, ad, state, max_wait):
     # FIXME: These event names should be defined in a common location
-    _BLUETOOTH_STATE_ON_EVENT = 'BluetoothStateChangedOn'
-    _BLUETOOTH_STATE_OFF_EVENT = 'BluetoothStateChangedOff'
+    _BLUETOOTH_STATE_ON_EVENT = "BluetoothStateChangedOn"
+    _BLUETOOTH_STATE_OFF_EVENT = "BluetoothStateChangedOff"
     ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT)
     ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT)
 
@@ -617,20 +633,21 @@
         if bt_state == state:
             return True
         if max_wait <= 0:
-            ad.log.error("Time out: bluetooth state still %s, expecting %s",
-                         bt_state, state)
+            ad.log.error(
+                "Time out: bluetooth state still %s, expecting %s", bt_state, state
+            )
             return False
 
-        event = {
-            False: _BLUETOOTH_STATE_OFF_EVENT,
-            True: _BLUETOOTH_STATE_ON_EVENT
-        }[state]
+        event = {False: _BLUETOOTH_STATE_OFF_EVENT, True: _BLUETOOTH_STATE_ON_EVENT}[
+            state
+        ]
         event = ad.ed.pop_event(event, max_wait)
-        ad.log.info("Got event %s", event['name'])
+        ad.log.info("Got event %s", event["name"])
         return True
     except Empty:
-        ad.log.error("Time out: bluetooth state still in %s, expecting %s",
-                     bt_state, state)
+        ad.log.error(
+            "Time out: bluetooth state still in %s, expecting %s", bt_state, state
+        )
         return False
     finally:
         ad.droid.bluetoothStopListeningForAdapterStateChange()
@@ -651,8 +668,7 @@
     return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call)
 
 
-def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args,
-                             **kwargs):
+def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args, **kwargs):
     while max_time >= 0:
         if state_check_func(log, ad, *args, **kwargs):
             return True
@@ -665,7 +681,10 @@
 
 # TODO: replace this with an event-based function
 def _wait_for_wifi_in_state(log, ad, state, max_wait):
-    return _wait_for_droid_in_state(log, ad, max_wait,
-        lambda log, ad, state: \
-                (True if ad.droid.wifiCheckState() == state else False),
-                state)
+    return _wait_for_droid_in_state(
+        log,
+        ad,
+        max_wait,
+        lambda log, ad, state: ad.droid.wifiCheckState() == state,
+        state,
+    )
diff --git a/src/antlion/controllers/anritsu_lib/OWNERS b/src/antlion/controllers/anritsu_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/anritsu_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/anritsu_lib/__init__.py b/src/antlion/controllers/anritsu_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/anritsu_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py b/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
deleted file mode 100644
index ea5736f..0000000
--- a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Utility functions for for Anritsu Signalling Tester.
-"""
-# yapf: disable
-
-OPERATION_COMPLETE = 1
-NO_ERROR = 0
-
-ANRITSU_ERROR_CODES = {
-    0: 'No errors occurred',
-    2: 'The specified file does not exist',
-    14: 'The buffer size is insufficient',
-    29: 'The save destination is a write-protected file.',
-    80: 'A file with the same name already exists.'
-        ' (If Overwrite is specified to 0.)',
-    87: 'The specified value is wrong.',
-    112: 'The disk space is insufficient.',
-    183: 'SmartStudio is already running.',
-    1060: 'The control software has not been started or has already terminated',
-    1067: 'SmartStudio, control software or SMS Centre could not start due to'
-          'a problem or problems resulting from OS or the MD8475A system.',
-    1229: 'Connecting to the server failed.',
-    1235: 'A request is suspended.',
-    1460: 'The operation is terminated due to the expiration of the'
-          ' timeout period.',
-    9999: 'A GPIB command error occurred.',
-    536870912: 'The license could not be confirmed.',
-    536870913: 'The specified file cannot be loaded by the SmartStudio.',
-    536870914: 'The specified process ID does not exist.',
-    536870915: 'The received data does not exist.',
-    536870916: 'Simulation is not running.',
-    536870917: 'Simulation is running.',
-    536870918: 'Test Case has never been executed.',
-    536870919: 'The resource cannot be obtained.',
-    536870920: 'A resource protocol error, such as download error or'
-               ' license error, occurred.',
-    536870921: 'The function call has been in invalid status.',
-    536870922: 'The current Simulation Model does not allow the operation.',
-    536870923: 'The Cell name to be set does not exist.',
-    536870924: 'The test is being executed.',
-    536870925: 'The current UE status does not correspond to the'
-               ' test parameters.',
-    536870926: 'There is no LOG information because the simulation'
-               ' has not been executed.',
-    536870927: 'Measure Export has already been executed.',
-    536870928: 'SmartStudio is not connected to the SMS Centre.',
-    536870929: 'SmartStudio failed to send an SMS message to the SMS Centre.',
-    536870930: 'SmartStudio has successfully sent an SMS message'
-               ' to the SMS Centre,but the SMS Centre judges it as an error.',
-    536870931: 'The processing that is unavailable with the current system'
-               ' status has been executed.',
-    536870932: 'The option could not be confirmed.',
-    536870933: 'Measure Export has been stopped.',
-    536870934: 'SmartStudio cannot load the specified file because the'
-               ' version is old.',
-    536870935: 'The data with the specified PDN number does not exist.',
-    536870936: 'The data with the specified Dedicated number does not exist.',
-    536870937: 'The PDN data cannot be added because the upper limit of the'
-               ' number of PDN data has been reached.',
-    536870938: 'The number of antennas, which cannot be set to the current'
-               ' Simulation Model,has been specified.',
-    536870939: 'Calibration of path loss failed.',
-    536870940: 'There is a parameter conflict.',
-    536870941: 'The DL Ref Power setting is out of the setting range'
-               ' at W-CDMA (Evolution).',
-    536870942: 'DC-HSDPA is not available for the current channel setting.',
-    536870943: 'The specified Packet Rate cannot be used by the current'
-               ' Simulation Model.',
-    536870944: 'The W-CDMA Cell parameter F-DPCH is set to Enable.',
-    536870945: 'Target is invalid.',
-    536870946: 'The PWS Centre detects an error.',
-    536870947: 'The Ec/Ior setting is invalid.',
-    536870948: 'The combination of Attach Type and TA Update Type is invalid.',
-    536870949: 'The license of the option has expired.',
-    536870950: 'The Ping command is being executed.',
-    536870951: 'The Ping command is not being executed.',
-    536870952: 'The current Test Case parameter setting is wrong.',
-    536870953: 'The specified IP address is the same as that of Default Gateway'
-               'specified by Simulation parameter.',
-    536870954: 'TFT IE conversion failed.',
-    536870955: 'Saving settings to the SmartStudio scenario failed.',
-    536875008: 'An error exists in the parameter configuration.'
-               '(This error applies only to the current version.)',
-    536936448: 'License verification failed.',
-    536936449: 'The IMS Services cannot load the specified file.',
-    536936462: 'Simulation is not performed and no log information exists.',
-    536936467: 'The executed process is inoperable in the current status'
-               ' of Visual User Agent.',
-    536936707: 'The specified Virtual Network is not running.',
-    536936709: 'The specified Virtual Network is running. '
-               'Any one of the Virtual Networks is running.',
-    536936727: 'The specified Virtual Network does not exist.',
-    536936729: 'When the Virtual Network already exists.',
-    554762241: 'The RF Measurement launcher cannot be accessed.',
-    554762242: 'License check of the RF Measurement failed.',
-    554762243: 'Function is called when RF Measurement cannot be set.',
-    554762244: 'RF Measurement has been already started.',
-    554762245: 'RF Measurement failed to start due to a problem resulting'
-               ' from OS or the MD8475A system.',
-    554762246: 'RF Measurement is not started or is already terminated.',
-    554762247: 'There is a version mismatch between RF Measurement and CAL.',
-    554827777: 'The specified value for RF Measurement is abnormal.',
-    554827778: 'GPIB command error has occurred in RF Measurement.',
-    554827779: 'Invalid file path was specified to RF Measurement.',
-    554827780: 'RF Measurement argument is NULL pointer.',
-    555810817: 'RF Measurement is now performing the measurement.',
-    555810818: 'RF Measurement is now not performing the measurement.',
-    555810819: 'RF Measurement is not measured yet. (There is no result '
-               'information since measurement is not performed.)',
-    555810820: 'An error has occurred when RF Measurement'
-               ' starts the measurement.',
-    555810821: 'Simulation has stopped when RF Measurement is '
-               'performing the measurement.',
-    555810822: 'An error has been retrieved from the Platform when '
-               'RF Measurement is performing the measurement.',
-    555810823: 'Measurement has been started in the system state where RF '
-               'Measurement is invalid.',
-    556859393: 'RF Measurement is now saving a file.',
-    556859394: 'There is insufficient disk space when saving'
-               'a Measure Result file of RF Measurement.',
-    556859395: 'An internal error has occurred or USB cable has been'
-               ' disconnected when saving a Measure Result'
-               ' file of RF Measurement.',
-    556859396: 'A write-protected file was specified as the save destination'
-               ' when saving a Measure Result file of RF Measurement.',
-    568328193: 'An internal error has occurred in RF Measurement.',
-    687865857: 'Calibration Measure DSP is now being measured.',
-    687865858: 'Calibration measurement failed.',
-    687865859: 'Calibration slot is empty or its system does not apply.',
-    687865860: 'Unexpected command is received from Calibration HWC.',
-    687865861: 'Failed to receive the Calibration measurement result.',
-    687865862: 'Failed to open the correction value file on the'
-               ' Calibration HDD.',
-    687865863: 'Failed to move the pointer on the Calibration correction'
-               ' value table.',
-    687865864: 'Failed to write the correction value to the Calibration'
-               ' correction value file on the Calibration HDD.',
-    687865865: 'Failed to load the correction value from the Calibration HDD.',
-    687865866: 'Failed to create a directory to which the correction value '
-               'file on the Calibration HDD is saved.',
-    687865867: 'Correction data has not been written in the'
-               ' Calibration-specified correction table.',
-    687865868: 'Data received from Calibration HWC does not exist.',
-    687865869: 'Data has not been written to the Flash ROM'
-               ' of Calibration BASE UNIT.',
-    687865870: 'Correction data has not been written to the'
-               ' Calibration-specified sector.',
-    687866111: 'An calibration error other than described above occurred.',
-}
-
-
-def _error_code_tostring(error_code):
-    ''' returns the description of the error from the error code
-    returned by anritsu MD8475A '''
-    try:
-        error_string = ANRITSU_ERROR_CODES[error_code]
-    except KeyError:
-        error_string = "Error : {} ".format(error_code)
-
-    return error_string
-
-
-class AnritsuUtils(object):
-    def gsm_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F', 'é': '%C3%A9'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-    def gsm_decode(text):
-        '''To decode text string with GSM 7-bit alphabet for common symbols'''
-        table = {'%20': ' ', '%21': '!', '%22': '\"', '%23': '#', '%24': '$',
-                 '%2F': '/', '%25': '%', '%26': '&', '%27': '\'', '%28': '(',
-                 '%29': ')', '%2A': '*', '%2B': '+', '%2C': ',', '%3A': ':',
-                 '%3B': ';', '%3C': '<', '%3D': '=', '%3E': '>', '%3F': '?',
-                 '%40': '@', '%5B': '[', '%5D': ']', '%5F': '_', '%C3%A9': 'é'}
-        coded_str = text
-        for char in table:
-            if char in text:
-                coded_str = coded_str.replace(char, table[char])
-        return coded_str
-
-    def cdma_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-class AnritsuError(Exception):
-    '''Exception for errors related to Anritsu.'''
-    def __init__(self, error, command=None):
-        self._error_code = error
-        self._error_message = _error_code_tostring(self._error_code)
-        if command is not None:
-            self._error_message = "Command {} returned the error: '{}'".format(
-                                  command, self._error_message)
-
-    def __str__(self):
-        return self._error_message
-# yapf: enable
diff --git a/src/antlion/controllers/anritsu_lib/band_constants.py b/src/antlion/controllers/anritsu_lib/band_constants.py
deleted file mode 100644
index 18dd5bc..0000000
--- a/src/antlion/controllers/anritsu_lib/band_constants.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# GSM BAND constants
-GSM_BAND_GSM450 = "GSM450"
-GSM_BAND_GSM480 = "GSM480"
-GSM_BAND_GSM850 = "GSM850"
-GSM_BAND_PGSM900 = "P-GSM900"
-GSM_BAND_EGSM900 = "E-GSM900"
-GSM_BAND_RGSM900 = "R-GSM900"
-GSM_BAND_DCS1800 = "DCS1800"
-GSM_BAND_PCS1900 = "PCS1900"
-
-LTE_BAND_2 = 2
-LTE_BAND_4 = 4
-LTE_BAND_12 = 12
-WCDMA_BAND_1 = 1
-WCDMA_BAND_2 = 2
diff --git a/src/antlion/controllers/anritsu_lib/cell_configurations.py b/src/antlion/controllers/anritsu_lib/cell_configurations.py
deleted file mode 100644
index 83773e0..0000000
--- a/src/antlion/controllers/anritsu_lib/cell_configurations.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Sanity tests for voice tests in telephony
-"""
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_PCS1900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_2
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_4
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_12
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_1
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_2
-from antlion.controllers.anritsu_lib.md8475a import BtsBandwidth
-
-# Different Cell configurations
-# TMO bands
-lte_band4_ch2000_fr2115_pcid1_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 11,
-    'cid': 1,
-    'pcid': 1,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid2_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 12,
-    'cid': 2,
-    'pcid': 2,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid3_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 13,
-    'cid': 3,
-    'pcid': 3,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid4_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 14,
-    'cid': 4,
-    'pcid': 4,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid5_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 15,
-    'cid': 5,
-    'pcid': 5,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid6_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 16,
-    'cid': 6,
-    'pcid': 6,
-    'channel': 2000
-}
-
-lte_band4_ch2050_fr2120_pcid7_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 17,
-    'cid': 7,
-    'pcid': 7,
-    'channel': 2050
-}
-
-lte_band4_ch2250_fr2140_pcid8_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 18,
-    'cid': 8,
-    'pcid': 8,
-    'channel': 2250
-}
-
-lte_band2_ch900_fr1960_pcid9_cell = {
-    'band': LTE_BAND_2,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 19,
-    'cid': 9,
-    'pcid': 9,
-    'channel': 900
-}
-
-lte_band12_ch5095_fr737_pcid10_cell = {
-    'band': LTE_BAND_12,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 20,
-    'cid': 10,
-    'pcid': 10,
-    'channel': 5095
-}
-
-wcdma_band1_ch10700_fr2140_cid31_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 31,
-    'rac': 31,
-    'cid': 31,
-    'channel': 10700,
-    'psc': 31
-}
-
-wcdma_band1_ch10700_fr2140_cid32_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 32,
-    'rac': 32,
-    'cid': 32,
-    'channel': 10700,
-    'psc': 32
-}
-
-wcdma_band1_ch10700_fr2140_cid33_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 33,
-    'rac': 33,
-    'cid': 33,
-    'channel': 10700,
-    'psc': 33
-}
-
-wcdma_band1_ch10700_fr2140_cid34_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 34,
-    'rac': 34,
-    'cid': 34,
-    'channel': 10700,
-    'psc': 34
-}
-
-wcdma_band1_ch10700_fr2140_cid35_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 35,
-    'rac': 35,
-    'cid': 35,
-    'channel': 10700,
-    'psc': 35
-}
-
-wcdma_band1_ch10575_fr2115_cid36_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 36,
-    'rac': 36,
-    'cid': 36,
-    'channel': 10575,
-    'psc': 36
-}
-
-wcdma_band1_ch10800_fr2160_cid37_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 37,
-    'rac': 37,
-    'cid': 37,
-    'channel': 10800,
-    'psc': 37
-}
-
-wcdma_band2_ch9800_fr1960_cid38_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 38,
-    'rac': 38,
-    'cid': 38,
-    'channel': 9800,
-    'psc': 38
-}
-
-wcdma_band2_ch9900_fr1980_cid39_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 39,
-    'rac': 39,
-    'cid': 39,
-    'channel': 9900,
-    'psc': 39
-}
-
-gsm_band1900_ch512_fr1930_cid51_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 51,
-    'rac': 51,
-    'cid': 51,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid52_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 52,
-    'rac': 52,
-    'cid': 52,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid53_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 53,
-    'rac': 53,
-    'cid': 53,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid54_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 54,
-    'rac': 54,
-    'cid': 54,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid55_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 55,
-    'rac': 55,
-    'cid': 55,
-    'channel': 512,
-}
-
-gsm_band1900_ch640_fr1955_cid56_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 56,
-    'rac': 56,
-    'cid': 56,
-    'channel': 640,
-}
-
-gsm_band1900_ch750_fr1977_cid57_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 57,
-    'rac': 57,
-    'cid': 57,
-    'channel': 750,
-}
-
-gsm_band850_ch128_fr869_cid58_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 58,
-    'rac': 58,
-    'cid': 58,
-    'channel': 128,
-}
-
-gsm_band850_ch251_fr893_cid59_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 59,
-    'rac': 59,
-    'cid': 59,
-    'channel': 251,
-}
diff --git a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py b/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
deleted file mode 100644
index 55a89e9..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import ntpath
-import time
-import antlion.controllers.cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-from antlion.controllers.anritsu_lib import md8475a
-from antlion.controllers.anritsu_lib import _anritsu_utils as anritsu
-
-
-class MD8475CellularSimulator(cc.AbstractCellularSimulator):
-
-    MD8475_VERSION = 'A'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = False
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = False
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = False
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 2
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -10
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp'
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475A\\Documents\\DAN_configs\\'
-
-    def __init__(self, ip_address):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the MD8475 instrument
-        """
-        super().__init__()
-
-        try:
-            self.anritsu = md8475a.MD8475A(ip_address,
-                                           md8475_version=self.MD8475_VERSION)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('Could not connect to MD8475.')
-
-        self.bts = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.anritsu.stop_simulation()
-        self.anritsu.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        cell_file_name = self.LTE_BASIC_CELL_FILE
-        sim_file_name = self.LTE_BASIC_SIM_FILE
-
-        cell_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, cell_file_name)
-        sim_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, sim_file_name)
-
-        self.anritsu.load_simulation_paramfile(sim_file_path)
-        self.anritsu.load_cell_paramfile(cell_file_path)
-
-        # MD4875A supports only 2 carriers. The MD4875B class adds other cells.
-        self.bts = [
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS1),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS2)
-        ]
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        The reason why this is implemented in a separate method and not calling
-        LteSimulation.BtsConfig for each separate band is that configuring each
-        ssc cannot be done separately, as it is necessary to know which
-        carriers are on the same band in order to decide which RF outputs can
-        be shared in the test equipment.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-        # Validate the number of carriers.
-        if self.num_carriers > self.LTE_MAX_CARRIERS:
-            raise cc.CellularSimulatorError('The test equipment supports up '
-                                            'to {} carriers.'.format(
-                                                self.LTE_MAX_CARRIERS))
-
-        # Initialize the base stations in the test equipment
-        self.anritsu.set_simulation_model(
-            *[md8475a.BtsTechnology.LTE for _ in range(self.num_carriers)],
-            reset=False)
-
-        # If base stations use different bands, make sure that the RF cards are
-        # not being shared by setting the right maximum MIMO modes
-        if self.num_carriers == 2:
-            # RF cards are never shared when doing 2CA so 4X4 can be done in
-            # both base stations.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-        elif self.num_carriers == 3:
-            # 4X4 can only be done in the second base station if it is shared
-            # with the primary. If the RF cards cannot be shared, then at most
-            # 2X2 can be done.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            if bands[0] == bands[1]:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            else:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-            self.bts[2].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-        elif self.num_carriers > 3:
-            raise NotImplementedError('The controller doesn\'t implement more '
-                                      'than 3 carriers for MD8475B yet.')
-
-        # Enable carrier aggregation if there is more than one carrier
-        if self.num_carriers > 1:
-            self.anritsu.set_carrier_aggregation_enabled()
-
-        # Restart the simulation as changing the simulation model will stop it.
-        self.anritsu.start_simulation()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        nrb_ul = int(self.bts[bts_index].nrb_ul)
-        max_nrb_ul = self.bts[bts_index].max_nrb_ul
-        input_level = str(
-            round(input_power - 10 * math.log10(nrb_ul / max_nrb_ul), 1))
-        if nrb_ul < max_nrb_ul:
-            self.log.info('Number of UL RBs ({}) is less than the maximum RB '
-                          'allocation ({}). Increasing UL reference power to '
-                          '{} dbm to compensate'.format(
-                              nrb_ul, max_nrb_ul, input_level))
-        self.bts[bts_index].input_level = input_level
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.bts[bts_index].output_level = output_power
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        # Temporarily adding this line to workaround a bug in the
-        # Anritsu callbox in which the channel number needs to be set
-        # to a different value before setting it to the final one.
-        self.bts[bts_index].dl_channel = str(int(channel_number + 1))
-        time.sleep(8)
-        self.bts[bts_index].dl_channel = str(int(channel_number))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        if enabled and not self.LTE_SUPPORTS_DL_256QAM:
-            raise RuntimeError('256 QAM is not supported')
-        self.bts[bts_index].lte_dl_modulation_order = \
-            md8475a.ModulationType.Q256 if enabled else md8475a.ModulationType.Q64
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.bts[bts_index].lte_ul_modulation_order = \
-            md8475a.ModulationType.Q64 if enabled else md8475a.ModulationType.Q16
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        if mac_padding:
-            self.bts[bts_index].tbs_pattern = 'FULLALLOCATION'
-        else:
-            self.bts[bts_index].tbs_pattern = 'OFF'
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.anritsu.set_lte_rrc_status_change(enabled)
-        if enabled:
-            self.anritsu.set_lte_rrc_status_change_timer(time)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        self.bts[bts_index].cfi = cfi
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.bts[bts_index].paging_duration = cycle_duration
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.bts[bts_index].phich_resource = phich
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the DRX connected mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        mode = 'MANUAL' if active else 'OFF'
-        self.bts[bts_index].drx_connected_mode = mode
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        self.bts[bts_index].drx_on_duration_timer = timer
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: The time interval to wait before entering DRX mode
-        """
-        self.bts[bts_index].drx_inactivity_timer = timer
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        self.bts[bts_index].drx_retransmission_timer = timer
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-        """
-        self.bts[bts_index].drx_long_cycle = cycle
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long drx cycle
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        self.bts[bts_index].drx_long_cycle_offset = offset
-
-    def set_band(self, bts_index, band):
-        """ Sets the right duplex mode before switching to a new band.
-
-        Args:
-            bts_index: the base station number
-            band: desired band
-        """
-        bts = self.bts[bts_index]
-
-        # The callbox won't restore the band-dependent default values if the
-        # request is to switch to the same band as the one the base station is
-        # currently using. To ensure that default values are restored, go to a
-        # different band before switching.
-        if int(bts.band) == band:
-            # Using bands 1 and 2 but it could be any others
-            bts.band = '1' if band != 1 else '2'
-            # Switching to config.band will be handled by the parent class
-            # implementation of this method.
-
-        bts.duplex_mode = self.get_duplex_mode(band).value
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-
-        if 33 <= int(band) <= 46:
-            return LteSimulation.DuplexMode.TDD
-        else:
-            return LteSimulation.DuplexMode.FDD
-
-    def set_tdd_config(self, bts_index, config):
-        """ Sets the frame structure for TDD bands.
-
-        Args:
-            bts_index: the base station number
-            config: the desired frame structure. An int between 0 and 6.
-        """
-
-        if not 0 <= config <= 6:
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6")
-
-        self.bts[bts_index].uldl_configuration = config
-
-        # Wait for the setting to propagate
-        time.sleep(5)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        # Cast to int in case it was passed as a string
-        ssf_config = int(ssf_config)
-
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the LTE channel bandwidth (MHz)
-
-        Args:
-            bts_index: the base station number
-            bandwidth: desired bandwidth (MHz)
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_1dot4MHz
-        else:
-            msg = "Bandwidth = {} MHz is not valid for LTE".format(bandwidth)
-            self.log.error(msg)
-            raise ValueError(msg)
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def set_mimo_mode(self, bts_index, mimo):
-        """ Sets the number of DL antennas for the desired MIMO mode.
-
-        Args:
-            bts_index: the base station number
-            mimo: object of class MimoMode
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the requested mimo mode is not compatible with the current TM,
-        # warn the user before changing the value.
-
-        if mimo == LteSimulation.MimoMode.MIMO_1x1:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM1,
-                    LteSimulation.TransmissionMode.TM7
-            ]:
-                self.log.warning(
-                    "Using only 1 DL antennas is not allowed with "
-                    "the current transmission mode. Changing the "
-                    "number of DL antennas will override this "
-                    "setting.")
-            bts.dl_antenna = 1
-        elif mimo == LteSimulation.MimoMode.MIMO_2x2:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM8,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using two DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-            bts.dl_antenna = 2
-        elif mimo == LteSimulation.MimoMode.MIMO_4x4 and \
-            self.LTE_SUPPORTS_4X4_MIMO:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using four DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-
-            bts.dl_antenna = 4
-        else:
-            RuntimeError("The requested MIMO mode is not supported.")
-
-    def set_scheduling_mode(self, bts_index, scheduling, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for LTE
-
-        Args:
-            bts_index: the base station number
-            scheduling: DYNAMIC or STATIC scheduling (Enum list)
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-
-        bts = self.bts[bts_index]
-        bts.lte_scheduling_mode = scheduling.value
-
-        if scheduling == LteSimulation.SchedulingMode.STATIC:
-
-            if not all([nrb_dl, nrb_ul, mcs_dl, mcs_ul]):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            bts.packet_rate = md8475a.BtsPacketRate.LTE_MANUAL
-            bts.lte_mcs_dl = mcs_dl
-            bts.lte_mcs_ul = mcs_ul
-            bts.nrb_dl = nrb_dl
-            bts.nrb_ul = nrb_ul
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-
-        # Trigger UE capability enquiry from network to get
-        # UE supported CA band combinations. Here freq_bands is a hex string.
-        self.anritsu.trigger_ue_capability_enquiry(ue_capability_enquiry)
-
-        testcase = self.anritsu.get_AnritsuTestCases()
-        # A bug in the instrument's software (b/139547391) requires the test
-        # procedure to be set to whatever was the previous configuration before
-        # setting it to MULTICELL.
-        testcase.procedure = md8475a.TestProcedure(testcase.procedure)
-        testcase.procedure = md8475a.TestProcedure.PROCEDURE_MULTICELL
-        testcase.power_control = md8475a.TestPowerControl.POWER_CONTROL_DISABLE
-        testcase.measurement_LTE = md8475a.TestMeasurement.MEASUREMENT_DISABLE
-
-        # Enable the secondary carrier base stations for CA
-        for bts_index in range(1, self.num_carriers):
-            self.bts[bts_index].dl_cc_enabled = True
-
-        self.anritsu.start_testcase()
-
-        retry_counter = 0
-        self.log.info("Waiting for the test case to start...")
-        time.sleep(5)
-
-        while self.anritsu.get_testcase_status() == "0":
-            retry_counter += 1
-            if retry_counter == 3:
-                raise RuntimeError(
-                    "The test case failed to start after {} "
-                    "retries. The connection between the phone "
-                    "and the base station might be unstable.".format(
-                        retry_counter))
-            time.sleep(10)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the LTE basetation
-
-        Args:
-            bts_index: the base station number
-            tmode: Enum list from class 'TransmissionModeLTE'
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the selected transmission mode does not support the number of DL
-        # antennas, throw an exception.
-        if (tmode in [
-                LteSimulation.TransmissionMode.TM1,
-                LteSimulation.TransmissionMode.TM7
-        ] and bts.dl_antenna != '1'):
-            # TM1 and TM7 only support 1 DL antenna
-            raise ValueError("{} allows only one DL antenna. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-        elif (tmode == LteSimulation.TransmissionMode.TM8
-              and bts.dl_antenna != '2'):
-            # TM8 requires 2 DL antennas
-            raise ValueError("TM2 requires two DL antennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.")
-        elif (tmode in [
-                LteSimulation.TransmissionMode.TM2,
-                LteSimulation.TransmissionMode.TM3,
-                LteSimulation.TransmissionMode.TM4,
-                LteSimulation.TransmissionMode.TM9
-        ] and bts.dl_antenna == '1'):
-            # TM2, TM3, TM4 and TM9 require 2 or 4 DL antennas
-            raise ValueError("{} requires at least two DL atennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-
-        # The TM mode is allowed for the current number of DL antennas, so it
-        # is safe to change this setting now
-        bts.transmode = tmode.value
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_registration_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone did not attach before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_communication_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_idle_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in Idle state '
-                                            'before the time the timeout '
-                                            'period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        if self.anritsu.get_smartstudio_status() == \
-            md8475a.ProcessingStatus.PROCESS_STATUS_NOTRUN.value:
-            self.log.info('Device cannot be detached because simulation is '
-                          'not running.')
-            return
-        self.anritsu.set_simulation_state_to_poweroff()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.anritsu.stop_simulation()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.start_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic is already running.
-            # TODO (b/141962691): continue only if traffic is running
-            self.log.warning(str(inst))
-        time.sleep(4)
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.stop_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic has already been stopped
-            # TODO (b/141962691): continue only if traffic is stopped
-            self.log.warning(str(inst))
-        time.sleep(2)
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        # Try three times before raising an exception. This is needed because
-        # the callbox sometimes reports an active chain as 'DEACTIVE'.
-        retries_left = 3
-
-        while retries_left > 0:
-
-            ul_pusch = self.anritsu.get_measured_pusch_power().split(',')[0]
-
-            if ul_pusch != 'DEACTIVE':
-                return float(ul_pusch)
-
-            time.sleep(3)
-            retries_left -= 1
-            self.log.info('Chain shows as inactive. %d retries left.' %
-                          retries_left)
-
-        raise cc.CellularSimulatorError('Could not get measured PUSCH power.')
-
-
-class MD8475BCellularSimulator(MD8475CellularSimulator):
-
-    MD8475_VERSION = 'B'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = True
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = True
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = True
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 4
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -30
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp2'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp2'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp2'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp2'
-
-    # Filepath to the config files stored in the Anritsu callbox.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475B\\Documents\\DAN_configs\\'
-
-    def setup_lte_scenario(self):
-        """ The B model can support up to five carriers. """
-
-        super().setup_lte_scenario()
-
-        self.bts.extend([
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS3),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS4),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS5)
-        ])
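# --- Editorial sketch (not part of the deleted file) -------------------------
# Rough usage of the B-model simulator defined above, assuming its constructor
# takes the callbox IP address (the constructor is inherited and not shown in
# this hunk, so this is an assumption); '192.168.1.2' is a placeholder.
simulator = MD8475BCellularSimulator('192.168.1.2')
simulator.setup_lte_scenario()    # loads the LTE scenario and BTS1-BTS5 handles
simulator.start_data_traffic()    # starts IP traffic from the callbox to the DUT
print(simulator.get_measured_pusch_power())
simulator.stop_data_traffic()
simulator.stop()                  # the simulator must be set up again afterwards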
diff --git a/src/antlion/controllers/anritsu_lib/md8475a.py b/src/antlion/controllers/anritsu_lib/md8475a.py
deleted file mode 100644
index ac67229..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475a.py
+++ /dev/null
@@ -1,5066 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signalling Tester MD8475A.
-"""
-
-import logging
-import time
-import socket
-from enum import Enum
-from enum import IntEnum
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuUtils
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\0"
-
-# The following wait times (except COMMUNICATION_STATE_WAIT_TIME) are actually
-# the times for socket to time out. Increasing them is to make sure there is
-# enough time for MD8475A operation to be completed in some cases.
-# It won't increase test execution time.
-SMARTSTUDIO_LAUNCH_WAIT_TIME = 300  # was 90
-SMARTSTUDIO_SIMULATION_START_WAIT_TIME = 300  # was 120
-REGISTRATION_STATE_WAIT_TIME = 240
-LOAD_SIMULATION_PARAM_FILE_WAIT_TIME = 30
-COMMUNICATION_STATE_WAIT_TIME = 240
-ANRITSU_SOCKET_BUFFER_SIZE = 8192
-COMMAND_COMPLETE_WAIT_TIME = 180  # was 90
-SETTLING_TIME = 1
-WAIT_TIME_IDENTITY_RESPONSE = 5
-IDLE_STATE_WAIT_TIME = 240
-
-IMSI_READ_USERDATA_WCDMA = "081501"
-IMEI_READ_USERDATA_WCDMA = "081502"
-IMEISV_READ_USERDATA_WCDMA = "081503"
-IMSI_READ_USERDATA_LTE = "075501"
-IMEI_READ_USERDATA_LTE = "075502"
-IMEISV_READ_USERDATA_LTE = "075503"
-IMSI_READ_USERDATA_GSM = "081501"
-IMEI_READ_USERDATA_GSM = "081502"
-IMEISV_READ_USERDATA_GSM = "081503"
-IDENTITY_REQ_DATA_LEN = 24
-SEQ_LOG_MESSAGE_START_INDEX = 60
-
-WCDMA_BANDS = {
-    "I": "1",
-    "II": "2",
-    "III": "3",
-    "IV": "4",
-    "V": "5",
-    "VI": "6",
-    "VII": "7",
-    "VIII": "8",
-    "IX": "9",
-    "X": "10",
-    "XI": "11",
-    "XII": "12",
-    "XIII": "13",
-    "XIV": "14"
-}
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MD8475A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class ProcessingStatus(Enum):
-    ''' MD8475A processing status for UE, Packet, Voice, Video, SMS,
-        PPP, PWS '''
-    PROCESS_STATUS_NONE = "NONE"
-    PROCESS_STATUS_NOTRUN = "NOTRUN"
-    PROCESS_STATUS_POWEROFF = "POWEROFF"
-    PROCESS_STATUS_REGISTRATION = "REGISTRATION"
-    PROCESS_STATUS_DETACH = "DETACH"
-    PROCESS_STATUS_IDLE = "IDLE"
-    PROCESS_STATUS_ORIGINATION = "ORIGINATION"
-    PROCESS_STATUS_HANDOVER = "HANDOVER"
-    PROCESS_STATUS_UPDATING = "UPDATING"
-    PROCESS_STATUS_TERMINATION = "TERMINATION"
-    PROCESS_STATUS_COMMUNICATION = "COMMUNICATION"
-    PROCESS_STATUS_UERELEASE = "UERELEASE"
-    PROCESS_STATUS_NWRELEASE = "NWRELEASE"
-
-
-class BtsNumber(Enum):
-    '''ID number for MD8475A supported BTS '''
-    BTS1 = "BTS1"
-    BTS2 = "BTS2"
-    BTS3 = "BTS3"
-    BTS4 = "BTS4"
-    BTS5 = "BTS5"
-
-
-class BtsTechnology(Enum):
-    ''' BTS system technology'''
-    LTE = "LTE"
-    WCDMA = "WCDMA"
-    TDSCDMA = "TDSCDMA"
-    GSM = "GSM"
-    CDMA1X = "CDMA1X"
-    EVDO = "EVDO"
-
-
-class BtsBandwidth(Enum):
-    ''' Values for Cell Bandwidth '''
-    LTE_BANDWIDTH_1dot4MHz = "1.4MHz"
-    LTE_BANDWIDTH_3MHz = "3MHz"
-    LTE_BANDWIDTH_5MHz = "5MHz"
-    LTE_BANDWIDTH_10MHz = "10MHz"
-    LTE_BANDWIDTH_15MHz = "15MHz"
-    LTE_BANDWIDTH_20MHz = "20MHz"
-
-    def get_float_value(bts_bandwidth):
-        """ Returns a float representing the bandwidth in MHz.
-
-        Args:
-            bts_bandwidth: a BtsBandwidth enum or a string matching one of the
-            values in the BtsBandwidth enum.
-        """
-
-        if isinstance(bts_bandwidth, BtsBandwidth):
-            bandwidth_str = bts_bandwidth.value
-        elif isinstance(bts_bandwidth, str):
-            bandwidth_str = bts_bandwidth
-        else:
-            raise TypeError('bts_bandwidth should be an instance of string or '
-                            'BtsBandwidth. ')
-
-        if bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
-            return 20
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
-            return 15
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
-            return 10
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
-            return 5
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
-            return 3
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
-            return 1.4
-        else:
-            raise ValueError(
-                'Could not map {} to a bandwidth value.'.format(bandwidth_str))
-
-
-MAX_NRB_FOR_BANDWIDTH = {
-    BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value: 6,
-    BtsBandwidth.LTE_BANDWIDTH_3MHz.value: 15,
-    BtsBandwidth.LTE_BANDWIDTH_5MHz.value: 25,
-    BtsBandwidth.LTE_BANDWIDTH_10MHz.value: 50,
-    BtsBandwidth.LTE_BANDWIDTH_15MHz.value: 75,
-    BtsBandwidth.LTE_BANDWIDTH_20MHz.value: 100
-}
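# --- Editorial sketch (not part of the deleted file) -------------------------
# The if/elif chain in BtsBandwidth.get_float_value() can equivalently be
# written as a table lookup, mirroring MAX_NRB_FOR_BANDWIDTH above.
# BANDWIDTH_MHZ and bandwidth_to_mhz are hypothetical names used only here.
BANDWIDTH_MHZ = {
    BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value: 1.4,
    BtsBandwidth.LTE_BANDWIDTH_3MHz.value: 3,
    BtsBandwidth.LTE_BANDWIDTH_5MHz.value: 5,
    BtsBandwidth.LTE_BANDWIDTH_10MHz.value: 10,
    BtsBandwidth.LTE_BANDWIDTH_15MHz.value: 15,
    BtsBandwidth.LTE_BANDWIDTH_20MHz.value: 20,
}


def bandwidth_to_mhz(bts_bandwidth):
    """Return the bandwidth in MHz for a BtsBandwidth enum or its string value."""
    key = (bts_bandwidth.value
           if isinstance(bts_bandwidth, BtsBandwidth) else bts_bandwidth)
    try:
        return BANDWIDTH_MHZ[key]
    except KeyError:
        raise ValueError('Could not map {} to a bandwidth value.'.format(key))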
-
-
-class LteMimoMode(Enum):
-    """ Values for LTE MIMO modes. """
-    NONE = "MIMONOT"
-    MIMO_2X2 = "MIMO2X2"
-    MIMO_4X4 = "MIMO4X4"
-
-
-class BtsGprsMode(Enum):
-    ''' Values for Gprs Modes '''
-    NO_GPRS = "NO_GPRS"
-    GPRS = "GPRS"
-    EGPRS = "EGPRS"
-
-
-class BtsPacketRate(Enum):
-    ''' Values for Cell Packet rate '''
-    LTE_MANUAL = "MANUAL"
-    LTE_BESTEFFORT = "BESTEFFORT"
-    WCDMA_DL384K_UL64K = "DL384K_UL64K"
-    WCDMA_DLHSAUTO_REL7_UL384K = "DLHSAUTO_REL7_UL384K"
-    WCDMA_DL18_0M_UL384K = "DL18_0M_UL384K"
-    WCDMA_DL21_6M_UL384K = "DL21_6M_UL384K"
-    WCDMA_DLHSAUTO_REL7_ULHSAUTO = "DLHSAUTO_REL7_ULHSAUTO"
-    WCDMA_DL18_0M_UL1_46M = "DL18_0M_UL1_46M"
-    WCDMA_DL18_0M_UL2_0M = "DL18_0M_UL2_0M"
-    WCDMA_DL18_0M_UL5_76M = "DL18_0M_UL5_76M"
-    WCDMA_DL21_6M_UL1_46M = "DL21_6M_UL1_46M"
-    WCDMA_DL21_6M_UL2_0M = "DL21_6M_UL2_0M"
-    WCDMA_DL21_6M_UL5_76M = "DL21_6M_UL5_76M"
-    WCDMA_DLHSAUTO_REL8_UL384K = "DLHSAUTO_REL8_UL384K"
-    WCDMA_DL23_4M_UL384K = "DL23_4M_UL384K"
-    WCDMA_DL28_0M_UL384K = "DL28_0M_UL384K"
-    WCDMA_DL36_0M_UL384K = "DL36_0M_UL384K"
-    WCDMA_DL43_2M_UL384K = "DL43_2M_UL384K"
-    WCDMA_DLHSAUTO_REL8_ULHSAUTO = "DLHSAUTO_REL8_ULHSAUTO"
-    WCDMA_DL23_4M_UL1_46M = "DL23_4M_UL1_46M"
-    WCDMA_DL23_4M_UL2_0M = "DL23_4M_UL2_0M"
-    WCDMA_DL23_4M_UL5_76M = "DL23_4M_UL5_76M"
-    WCDMA_DL28_0M_UL1_46M = "DL28_0M_UL1_46M"
-    WCDMA_DL28_0M_UL2_0M = "DL28_0M_UL2_0M"
-    WCDMA_DL28_0M_UL5_76M = "DL28_0M_UL5_76M"
-    WCDMA_DL36_0M_UL1_46M = "DL36_0M_UL1_46M"
-    WCDMA_DL36_0M_UL2_0M = "DL36_0M_UL2_0M"
-    WCDMA_DL36_0M_UL5_76M = "DL36_0M_UL5_76M"
-    WCDMA_DL43_2M_UL1_46M = "DL43_2M_UL1_46M"
-    WCDMA_DL43_2M_UL2_0M = "DL43_2M_UL2_0M"
-    WCDMA_DL43_2M_UL5_76M = "DL43_2M_UL5_76M"
-
-
-class BtsPacketWindowSize(Enum):
-    ''' Values for Cell Packet window size '''
-    WINDOW_SIZE_1 = 1
-    WINDOW_SIZE_8 = 8
-    WINDOW_SIZE_16 = 16
-    WINDOW_SIZE_32 = 32
-    WINDOW_SIZE_64 = 64
-    WINDOW_SIZE_128 = 128
-    WINDOW_SIZE_256 = 256
-    WINDOW_SIZE_512 = 512
-    WINDOW_SIZE_768 = 768
-    WINDOW_SIZE_1024 = 1024
-    WINDOW_SIZE_1536 = 1536
-    WINDOW_SIZE_2047 = 2047
-
-
-class BtsServiceState(Enum):
-    ''' Values for BTS service state '''
-    SERVICE_STATE_IN = "IN"
-    SERVICE_STATE_OUT = "OUT"
-
-
-class BtsCellBarred(Enum):
-    ''' Values for Cell barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsAccessClassBarred(Enum):
-    ''' Values for Access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    EMERGENCY = "EMERGENCY"
-    BARRED = "BARRED"
-    USERSPECIFIC = "USERSPECIFIC"
-
-
-class BtsLteEmergencyAccessClassBarred(Enum):
-    ''' Values for LTE emergency access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsNwNameEnable(Enum):
-    ''' Values for BTS network name enable parameter '''
-    NAME_ENABLE = "ON"
-    NAME_DISABLE = "OFF"
-
-
-class IPAddressType(Enum):
-    ''' Values for IP address type '''
-    IPV4 = "IPV4"
-    IPV6 = "IPV6"
-    IPV4V6 = "IPV4V6"
-
-
-class TriggerMessageIDs(Enum):
-    ''' ID for Trigger messages  '''
-    RRC_CONNECTION_REQ = 111101
-    RRC_CONN_REESTABLISH_REQ = 111100
-    ATTACH_REQ = 141141
-    DETACH_REQ = 141145
-    MM_LOC_UPDATE_REQ = 221108
-    GMM_ATTACH_REQ = 241101
-    GMM_RA_UPDATE_REQ = 241108
-    IDENTITY_REQUEST_LTE = 141155
-    IDENTITY_REQUEST_WCDMA = 241115
-    IDENTITY_REQUEST_GSM = 641115
-    UE_CAPABILITY_ENQUIRY = 111167
-
-
-class TriggerMessageReply(Enum):
-    ''' Values for Trigger message reply parameter '''
-    ACCEPT = "ACCEPT"
-    REJECT = "REJECT"
-    IGNORE = "IGNORE"
-    NONE = "NONE"
-    ILLEGAL = "ILLEGAL"
-
-
-class TestProcedure(Enum):
-    ''' Values for different Test procedures in MD8475A '''
-    PROCEDURE_BL = "BL"
-    PROCEDURE_SELECTION = "SELECTION"
-    PROCEDURE_RESELECTION = "RESELECTION"
-    PROCEDURE_REDIRECTION = "REDIRECTION"
-    PROCEDURE_HO = "HO"
-    PROCEDURE_HHO = "HHO"
-    PROCEDURE_SHO = "SHO"
-    PROCEDURE_MEASUREMENT = "MEASUREMENT"
-    PROCEDURE_CELLCHANGE = "CELLCHANGE"
-    PROCEDURE_MULTICELL = "MULTICELL"
-
-
-class TestPowerControl(Enum):
-    ''' Values for power control in test procedure '''
-    POWER_CONTROL_ENABLE = "ENABLE"
-    POWER_CONTROL_DISABLE = "DISABLE"
-
-
-class TestMeasurement(Enum):
-    ''' Values for measurement in test procedure '''
-    MEASUREMENT_ENABLE = "ENABLE"
-    MEASUREMENT_DISABLE = "DISABLE"
-
-
-'''MD8475A processing states'''
-_PROCESS_STATES = {
-    "NONE": ProcessingStatus.PROCESS_STATUS_NONE,
-    "NOTRUN": ProcessingStatus.PROCESS_STATUS_NOTRUN,
-    "POWEROFF": ProcessingStatus.PROCESS_STATUS_POWEROFF,
-    "REGISTRATION": ProcessingStatus.PROCESS_STATUS_REGISTRATION,
-    "DETACH": ProcessingStatus.PROCESS_STATUS_DETACH,
-    "IDLE": ProcessingStatus.PROCESS_STATUS_IDLE,
-    "ORIGINATION": ProcessingStatus.PROCESS_STATUS_ORIGINATION,
-    "HANDOVER": ProcessingStatus.PROCESS_STATUS_HANDOVER,
-    "UPDATING": ProcessingStatus.PROCESS_STATUS_UPDATING,
-    "TERMINATION": ProcessingStatus.PROCESS_STATUS_TERMINATION,
-    "COMMUNICATION": ProcessingStatus.PROCESS_STATUS_COMMUNICATION,
-    "UERELEASE": ProcessingStatus.PROCESS_STATUS_UERELEASE,
-    "NWRELEASE": ProcessingStatus.PROCESS_STATUS_NWRELEASE,
-}
-
-
-class ImsCscfStatus(Enum):
-    """ MD8475A ims cscf status for UE
-    """
-    OFF = "OFF"
-    SIPIDLE = "SIPIDLE"
-    CONNECTED = "CONNECTED"
-    CALLING = "CALLING"
-    RINGING = "RINGING"
-    UNKNOWN = "UNKNOWN"
-
-
-class ImsCscfCall(Enum):
-    """ MD8475A ims cscf call action
-    """
-    MAKE = "MAKE"
-    END = "END"
-    MAKEVIDEO = "MAKEVIDEO"
-    MAKE2ND = "MAKE2ND"
-    END2ND = "END2ND"
-    ANSWER = "ANSWER"
-    HOLD = "HOLD"
-    RESUME = "RESUME"
-
-
-class VirtualPhoneStatus(IntEnum):
-    ''' MD8475A virtual phone status for UE voice and UE video
-        PPP, PWS '''
-    STATUS_IDLE = 0
-    STATUS_VOICECALL_ORIGINATION = 1
-    STATUS_VOICECALL_INCOMING = 2
-    STATUS_VOICECALL_INPROGRESS = 3
-    STATUS_VOICECALL_DISCONNECTING = 4
-    STATUS_VOICECALL_DISCONNECTED = 5
-    STATUS_VIDEOCALL_ORIGINATION = 6
-    STATUS_VIDEOCALL_INCOMING = 7
-    STATUS_VIDEOCALL_INPROGRESS = 8
-    STATUS_VIDEOCALL_DISCONNECTING = 9
-    STATUS_VIDEOCALL_DISCONNECTED = 10
-
-
-'''Virtual Phone Status '''
-_VP_STATUS = {
-    "0": VirtualPhoneStatus.STATUS_IDLE,
-    "1": VirtualPhoneStatus.STATUS_VOICECALL_ORIGINATION,
-    "2": VirtualPhoneStatus.STATUS_VOICECALL_INCOMING,
-    "3": VirtualPhoneStatus.STATUS_VOICECALL_INPROGRESS,
-    "4": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTING,
-    "5": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTED,
-    "6": VirtualPhoneStatus.STATUS_VIDEOCALL_ORIGINATION,
-    "7": VirtualPhoneStatus.STATUS_VIDEOCALL_INCOMING,
-    "8": VirtualPhoneStatus.STATUS_VIDEOCALL_INPROGRESS,
-    "9": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTING,
-    "10": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTED,
-}
-
-
-class VirtualPhoneAutoAnswer(Enum):
-    ''' Virtual phone auto answer enable values'''
-    ON = "ON"
-    OFF = "OFF"
-
-
-class CsfbType(Enum):
-    ''' CSFB Type values'''
-    CSFB_TYPE_REDIRECTION = "REDIRECTION"
-    CSFB_TYPE_HANDOVER = "HO"
-
-
-class ReturnToEUTRAN(Enum):
-    '''Return to EUTRAN setting values '''
-    RETEUTRAN_ENABLE = "ENABLE"
-    RETEUTRAN_DISABLE = "DISABLE"
-
-
-class CTCHSetup(Enum):
-    '''CTCH setting values '''
-    CTCH_ENABLE = "ENABLE"
-    CTCH_DISABLE = "DISABLE"
-
-
-class UEIdentityType(Enum):
-    '''UE Identity type values '''
-    IMSI = "IMSI"
-    IMEI = "IMEI"
-    IMEISV = "IMEISV"
-
-
-class CBCHSetup(Enum):
-    '''CBCH setting values '''
-    CBCH_ENABLE = "ENABLE"
-    CBCH_DISABLE = "DISABLE"
-
-
-class Switch(Enum):
-    ''' Values for ENABLE or DISABLE '''
-    ENABLE = "ENABLE"
-    DISABLE = "DISABLE"
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-class MD8475A(object):
-    """Class to communicate with Anritsu MD8475A Signalling Tester.
-       This uses GPIB commands to interface with the Anritsu MD8475A. """
-    def __init__(self, ip_address, wlan=False, md8475_version="A"):
-        self._error_reporting = True
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-        self._wlan = wlan
-        port_number = 28002
-        self._md8475_version = md8475_version
-        if md8475_version == "B":
-            global TERMINATOR
-            TERMINATOR = "\n"
-            port_number = 5025
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signaling Tester ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, port_number),
-                                                  timeout=120)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication with Signaling Tester OK.")
-            self.log.info("Opened Socket connection to ({}) "
-                          "with handle ({})".format(self._ipaddr, self._sock))
-            # launching Smart Studio Application needed for the simulation
-            ret = self.launch_smartstudio()
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while connecting to"
-                               " Anritsu MD8475A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
-    def get_BTS(self, btsnumber):
-        """ Returns the BTS object based on the BTS number provided
-
-        Args:
-            btsnumber: BTS number (BTS1, BTS2)
-
-        Returns:
-            BTS object
-        """
-        return _BaseTransceiverStation(self, btsnumber)
-
-    def get_AnritsuTestCases(self):
-        """ Returns the Anritsu Test Case Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Test Case Module Object
-        """
-        return _AnritsuTestCases(self)
-
-    def get_VirtualPhone(self):
-        """ Returns the Anritsu Virtual Phone Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Virtual Phone Module Object
-        """
-        return _VirtualPhone(self)
-
-    def get_PDN(self, pdn_number):
-        """ Returns the PDN Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu PDN Module Object
-        """
-        return _PacketDataNetwork(self, pdn_number)
-
-    def get_TriggerMessage(self):
-        """ Returns the Anritsu Trigger Message Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Trigger Message Module Object
-        """
-        return _TriggerMessage(self)
-
-    def get_IMS(self, vnid):
-        """ Returns the IMS Module Object with VNID
-
-        Args:
-            vnid: Virtual Network ID
-
-        Returns:
-            Anritsu IMS VNID Module Object
-        """
-        return _IMS_Services(self, vnid)
-
-    def get_ims_cscf_status(self, virtual_network_id):
-        """ Get the IMS CSCF Status of virtual network
-
-        Args:
-            virtual_network_id: virtual network id
-
-        Returns:
-            IMS CSCF status
-        """
-        cmd = "IMSCSCFSTAT? {}".format(virtual_network_id)
-        return self.send_query(cmd)
-
-    def ims_cscf_call_action(self, virtual_network_id, action):
-        """ IMS CSCF Call action
-
-        Args:
-            virtual_network_id: virtual network id
-            action: action to make
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFCALL {},{}".format(virtual_network_id, action)
-        self.send_command(cmd)
-
-    def send_query(self, query, sock_timeout=120):
-        """ Sends a Query message to Anritsu and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=120):
-        """ Sends a Command message to Anritsu
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        if self._error_reporting:
-            cmdToSend = (command + ";ERROR?" + TERMINATOR).encode('utf-8')
-            self._sock.settimeout(sock_timeout)
-            try:
-                self._sock.send(cmdToSend)
-                err = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                    TERMINATOR.encode('utf-8'))
-                error = int(err.decode('utf-8'))
-                if error != NO_ERROR:
-                    raise AnritsuError(error, command)
-            except socket.timeout:
-                raise AnritsuError("Timeout for Command Response from Anritsu")
-            except socket.error:
-                raise AnritsuError("Socket Error for Anritsu command")
-            except Exception as e:
-                raise AnritsuError(e, command)
-        else:
-            cmdToSend = (command + TERMINATOR).encode('utf-8')
-            try:
-                self._sock.send(cmdToSend)
-            except socket.error:
-                raise AnritsuError("Socket Error", command)
-            return
-
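# --- Editorial sketch (not part of the deleted file) -------------------------
# send_query()/send_command() above speak a plain text protocol over TCP: each
# request is terminated with TERMINATOR ("\0" for the A model, "\n" for the B
# model) and, when error reporting is enabled, ";ERROR?" is appended so the
# instrument replies with a numeric error code. A standalone illustration
# (host, port and command are placeholders, not antlion APIs):
import socket


def raw_query(host, port, text, terminator='\n', timeout=30):
    """Send one terminated command and return the decoded reply."""
    with socket.create_connection((host, port), timeout=timeout) as sock:
        sock.sendall((text + terminator).encode('utf-8'))
        reply = sock.recv(8192)
        return reply.rstrip(terminator.encode('utf-8')).decode('utf-8')


# Example: raw_query('192.168.1.2', 5025, '*IDN?')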
-    def launch_smartstudio(self):
-        """ launch the Smart studio application
-            This should be done before starting the simulation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        # Check the Smart Studio status. If Smart Studio is not running
-        # (NOTEXIST), launch it; if a simulation is running, stop it. Smart
-        # Studio must be in the NOTRUN (Simulation Stopped) state before a
-        # new simulation can be started.
-        stat = self.send_query("STAT?", 30)
-        if stat == "NOTEXIST":
-            self.log.info("Launching Smart Studio Application,"
-                          "it takes about a minute.")
-            time_to_wait = SMARTSTUDIO_LAUNCH_WAIT_TIME
-            sleep_interval = 15
-            waiting_time = 0
-
-            err = self.send_command("RUN", SMARTSTUDIO_LAUNCH_WAIT_TIME)
-            stat = self.send_query("STAT?")
-            while stat != "NOTRUN":
-                time.sleep(sleep_interval)
-                waiting_time = waiting_time + sleep_interval
-                if waiting_time <= time_to_wait:
-                    stat = self.send_query("STAT?")
-                else:
-                    raise AnritsuError("Timeout: Smart Studio launch")
-        elif stat == "RUNNING":
-            # Stop simulation if necessary
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-
-        # The state of the Smart Studio should be NOTRUN at this point
-        # after the one of the steps from above
-        if stat != "NOTRUN":
-            self.log.info(
-                "Cannot launch Smart Studio; "
-                "please shut down all the Smart Studio SW components")
-            raise AnritsuError("Could not run SmartStudio")
-
-    def close_smartstudio(self):
-        """ Closes the Smart studio application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        self.send_command("EXIT", 60)
-
-    def get_smartstudio_status(self):
-        """ Gets the Smart studio status
-
-        Args:
-            None
-
-        Returns:
-            Smart studio status
-        """
-        return self.send_query("STAT?")
-
-    def start_simulation(self):
-        """ Starting the simulation of the network model.
-            simulation model or simulation parameter file
-            should be set before starting the simulation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        time_to_wait = SMARTSTUDIO_SIMULATION_START_WAIT_TIME
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.send_command("START", SMARTSTUDIO_SIMULATION_START_WAIT_TIME)
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("Timeout: Starting simulation")
-
-    def stop_simulation(self):
-        """ Stop simulation operation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # Stop virtual network (IMS) #1 if still running
-        # this is needed before Sync command is supported in 6.40a
-        if self.send_query("IMSVNSTAT? 1") == "RUNNING":
-            self.send_command("IMSSTOPVN 1")
-        if self.send_query("IMSVNSTAT? 2") == "RUNNING":
-            self.send_command("IMSSTOPVN 2")
-        stat = self.send_query("STAT?")
-        # Stop the simulation if it is RUNNING
-        if stat == "RUNNING":
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-            if stat != "NOTRUN":
-                self.log.info("Failed to stop simulation")
-                raise AnritsuError("Failed to stop simulation")
-
-    def reset(self):
-        """ reset simulation parameters
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("*RST", COMMAND_COMPLETE_WAIT_TIME)
-
-    def load_simulation_paramfile(self, filepath):
-        """ loads simulation model parameter file
-        Args:
-          filepath : simulation model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADSIMPARAM \"" + filepath + '\";ERROR?'
-        self.send_query(cmd, LOAD_SIMULATION_PARAM_FILE_WAIT_TIME)
-
-    def load_cell_paramfile(self, filepath):
-        """ loads cell model parameter file
-
-        Args:
-          filepath : cell model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADCELLPARAM \"" + filepath + '\";ERROR?'
-        status = int(self.send_query(cmd))
-        if status != NO_ERROR:
-            raise AnritsuError(status, cmd)
-
-    def _set_simulation_model(self, sim_model, reset=True):
-        """ Set simulation model and validate the configuration
-
-        Args:
-            sim_model: simulation model
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True/False
-        """
-        error = int(
-            self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                            COMMAND_COMPLETE_WAIT_TIME))
-        if error:  # Try again if first set SIMMODEL fails
-            time.sleep(3)
-            if "WLAN" in sim_model:
-                new_sim_model = sim_model[:-5]
-                error = int(
-                    self.send_query("SIMMODEL %s;ERROR?" % new_sim_model,
-                                    COMMAND_COMPLETE_WAIT_TIME))
-                time.sleep(3)
-            error = int(
-                self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                                COMMAND_COMPLETE_WAIT_TIME))
-            if error:
-                return False
-        if reset:
-            # Reset might be necessary because SIMMODEL will load
-            # some of the contents from previous parameter files.
-            self.reset()
-        return True
-
-    def set_simulation_model(self, *bts_rats, reset=True):
-        """ Stops the simulation and then sets the simulation model.
-
-        Args:
-            *bts_rats: base station rats for BTS 1 to 5.
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True or False
-        """
-        self.stop_simulation()
-        if len(bts_rats) not in range(1, 6):
-            raise ValueError(
-                "set_simulation_model requires 1 to 5 BTS values.")
-        simmodel = ",".join(bts_rat.value for bts_rat in bts_rats)
-        if self._wlan:
-            simmodel = simmodel + "," + "WLAN"
-        return self._set_simulation_model(simmodel, reset)
-
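# --- Editorial sketch (not part of the deleted file) -------------------------
# set_simulation_model() joins the BtsTechnology values with commas (plus
# ",WLAN" when the wlan flag was set at construction), so a two-cell
# LTE + WCDMA setup issues "SIMMODEL LTE,WCDMA". The IP address below is a
# placeholder.
anritsu = MD8475A('192.168.1.2')
anritsu.set_simulation_model(BtsTechnology.LTE, BtsTechnology.WCDMA)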
-    def get_simulation_model(self):
-        """ Gets the simulation model
-
-        Args:
-            None
-
-        Returns:
-            Current simulation model
-        """
-        cmd = "SIMMODEL?"
-        return self.send_query(cmd)
-
-    def get_lte_rrc_status_change(self):
-        """ Gets the LTE RRC status change function state
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "L_RRCSTAT?"
-        return self.send_query(cmd) == "ENABLE"
-
-    def set_lte_rrc_status_change(self, status_change):
-        """ Enables or Disables the LTE RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "L_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_lte_rrc_status_change_timer(self):
-        """ Gets the LTE RRC Status Change Timer
-
-        Returns:
-            returns a status change timer integer value
-        """
-        cmd = "L_STATTMR?"
-        return self.send_query(cmd)
-
-    def set_lte_rrc_status_change_timer(self, time):
-        """ Sets the LTE RRC Status Change Timer parameter
-
-        Returns:
-            None
-        """
-        cmd = "L_STATTMR %s" % time
-        self.send_command(cmd)
-
-    def set_umts_rrc_status_change(self, status_change):
-        """ Enables or Disables the UMTS RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "W_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_umts_rrc_status_change(self):
-        """ Gets the UMTS RRC Status Change
-
-        Returns:
-            The W_RRCSTAT query response string ("ENABLE" or "DISABLE")
-        """
-        cmd = "W_RRCSTAT?"
-        return self.send_query(cmd)
-
-    def set_umts_dch_stat_timer(self, timer_seconds):
-        """ Sets the UMTS RRC DCH timer
-
-        Returns:
-            None
-        """
-        cmd = "W_STATTMRDCH %s" % timer_seconds
-        self.send_command(cmd)
-
-    def set_simulation_state_to_poweroff(self):
-        """ Sets the simulation state to POWER OFF
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("RESETSIMULATION POWEROFF")
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_simulation_state_to_idle(self, btsnumber):
-        """ Sets the simulation state to IDLE
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        cmd = "RESETSIMULATION IDLE," + btsnumber.value
-        self.send_command(cmd)
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=IDLE")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_trigger_message_mode(self, msg_id):
-        """ Sets the Message Mode of the trigger
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMMESSAGEMODE {},USERDATA".format(msg_id)
-        self.send_command(cmd)
-
-    def set_data_of_trigger_message(self, msg_id, user_data):
-        """ Sets the User Data of the trigger message
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-            user_data: Hex data
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        data_len = len(user_data) * 4
-
-        cmd = "TMUSERDATA {}, {}, {}".format(msg_id, user_data, data_len)
-        self.send_command(cmd)
-
-    def send_trigger_message(self, msg_id):
-        """ Sends the User Data of the trigger information
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMSENDUSERMSG {}".format(msg_id)
-        self.send_command(cmd)
-
-    def wait_for_registration_state(self,
-                                    bts=1,
-                                    time_to_wait=REGISTRATION_STATE_WAIT_TIME):
-        """ Waits for UE registration state on Anritsu
-
-        Args:
-          bts: index of MD8475A BTS, eg 1, 2
-          time_to_wait: time to wait for the phone to get to registration state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE/COMMUNICATION state on anritsu.")
-
-        sleep_interval = 1
-        sim_model = (self.get_simulation_model()).split(",")
-        # wait 1 more round for GSM because of PS attach
-        registration_check_iterations = 2 if sim_model[bts - 1] == "GSM" else 1
-        for _ in range(registration_check_iterations):
-            waiting_time = 0
-            while waiting_time <= time_to_wait:
-                callstat = self.send_query(
-                    "CALLSTAT? BTS{}".format(bts)).split(",")
-                if callstat[0] == "IDLE" or callstat[1] == "COMMUNICATION":
-                    break
-                time.sleep(sleep_interval)
-                waiting_time += sleep_interval
-            else:
-                raise AnritsuError(
-                    "UE failed to register in {} seconds".format(time_to_wait))
-            time.sleep(sleep_interval)
-
-    def wait_for_communication_state(
-            self, time_to_wait=COMMUNICATION_STATE_WAIT_TIME):
-        """ Waits for UE communication state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to communication state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for COMMUNICATION state on anritsu")
-        sleep_interval = 1
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=COMMUNICATION")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[1] != "COMMUNICATION":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to register on network")
-
-    def wait_for_idle_state(self, time_to_wait=IDLE_STATE_WAIT_TIME):
-        """ Waits for UE idle state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to idle state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE state on anritsu.")
-
-        sleep_interval = 1
-        waiting_time = 0
-
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to go to idle state")
-
-    def get_camping_cell(self):
-        """ Gets the current camping cell information
-
-        Args:
-          None
-
-        Returns:
-            returns a tuple (BTS number, RAT Technology)
-        """
-        bts_number, rat_info = self.send_query("CAMPINGCELL?").split(",")
-        return bts_number, rat_info
-
-    def get_supported_bands(self, rat):
-        """ Gets the supported bands from UE capability information
-
-        Args:
-          rat: LTE or WCDMA
-
-        Returns:
-            returns a list of bands
-        """
-        cmd = "UEINFO? "
-        if rat == "LTE":
-            cmd += "L"
-        elif rat == "WCDMA":
-            cmd += "W"
-        else:
-            raise ValueError('The rat argument needs to be "LTE" or "WCDMA"')
-        cmd += "_SupportedBand"
-        result = self.send_query(cmd).split(",")
-        if result == ["NONE"]:
-            return None
-        if rat == "WCDMA":
-            bands = []
-            for band in result:
-                bands.append(WCDMA_BANDS[band])
-            return bands
-        else:
-            return result
-
-    def start_testcase(self):
-        """ Starts a test case on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("STARTTEST")
-
-    def get_testcase_status(self):
-        """ Gets the current test case status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current test case status
-        """
-        return self.send_query("TESTSTAT?")
-
-    def start_ip_traffic(self, pdn='1'):
-        """ Starts IP data traffic with the selected PDN.
-
-        Args:
-            pdn: the pdn to be used for data traffic. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC START,' + pdn)
-
-    def stop_ip_traffic(self, pdn='1'):
-        """ Stops IP data traffic with the selected PDN.
-
-         Args:
-            pdn: pdn for which data traffic has to be stopped. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC STOP,' + pdn)
-
-    def set_carrier_aggregation_enabled(self, enabled=True):
-        """ Enables or disables the carrier aggregation option.
-
-        Args:
-            enabled: enables CA if True and disables CA if False.
-        """
-        cmd = 'CA ' + ('ENABLE' if enabled else 'DISABLE')
-        self.send_command(cmd)
-
-    # Common Default Gateway:
-    @property
-    def gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV4?")
-
-    @gateway_ipv4addr.setter
-    def gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV4 " + ipv4_addr
-        self.send_command(cmd)
-
-    @property
-    def gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            IPv6 address of the default gateway
-        """
-        return self.send_query("DGIPV6?")
-
-    @gateway_ipv6addr.setter
-    def gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV6 " + ipv6_addr
-        self.send_command(cmd)
-
-    @property
-    def usim_key(self):
-        """ Gets the USIM Security Key
-
-        Args:
-          None
-
-        Returns:
-            USIM Security Key
-        """
-        return self.send_query("USIMK?")
-
-    @usim_key.setter
-    def usim_key(self, usimk):
-        """ sets the USIM Security Key
-        Args:
-            usimk: USIM Security Key, eg "000102030405060708090A0B0C0D0E0F"
-
-        Returns:
-            None
-        """
-        cmd = "USIMK " + usimk
-        self.send_command(cmd)
-
-    def get_ue_status(self):
-        """ Gets the current UE status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        UE_STATUS_INDEX = 0
-        ue_status = self.send_query("CALLSTAT?").split(",")[UE_STATUS_INDEX]
-        return _PROCESS_STATES[ue_status]
-
-    def get_packet_status(self):
-        """ Gets the current Packet status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current Packet status
-        """
-        PACKET_STATUS_INDEX = 1
-        packet_status = self.send_query("CALLSTAT?").split(
-            ",")[PACKET_STATUS_INDEX]
-        return _PROCESS_STATES[packet_status]
-
-    def disconnect(self):
-        """ Disconnect the Anritsu box from test PC
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # No need to exit the Smart Studio application here.
-        # self.close_smartstudio()
-        self._sock.close()
-
-    def machine_reboot(self):
-        """ Reboots the Anritsu Machine
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("REBOOT")
-
-    def save_sequence_log(self, fileName):
-        """ Saves the Anritsu Sequence logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVESEQLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_sequence_log(self):
-        """ Clears the Anritsu Sequence logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARSEQLOG")
-
-    def save_message_log(self, fileName):
-        """ Saves the Anritsu Message logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVEMSGLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_message_log(self):
-        """ Clears the Anritsu Message logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARMSGLOG")
-
-    def save_trace_log(self, fileName, fileType, overwrite, start, end):
-        """ Saves the Anritsu Trace logs
-
-        Args:
-          fileName: log file name
-          fileType: file type (BINARY, TEXT, H245, PACKET, CPLABE)
-          overwrite: whether to overwrite the file
-          start: starting trace number
-          end: ending trace number
-
-        Returns:
-            None
-        """
-        cmd = 'SAVETRACELOG "{}",{},{},{},{}'.format(fileName, fileType,
-                                                     overwrite, start, end)
-        self.send_command(cmd)
-
-    def send_cmas_lte_wcdma(self, serialNo, messageID, warningMessage):
-        """ Sends a CMAS message
-
-        Args:
-          serialNo: serial number of CMAS message
-          messageID: CMAS message ID
-          warningMessage:  CMAS Warning message
-
-        Returns:
-            None
-        """
-        cmd = ('PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=CMAS&SerialNo={}'
-               '&MessageID={}&wm={}"').format(serialNo, messageID,
-                                              warningMessage)
-        self.send_command(cmd)
-
-    def send_etws_lte_wcdma(self, serialNo, messageID, warningType,
-                            warningMessage, userAlertenable, popUpEnable):
-        """ Sends an ETWS message
-
-        Args:
-          serialNo: serial number of the ETWS message
-          messageID: ETWS message ID
-          warningType: ETWS warning type
-          warningMessage: ETWS warning message
-          userAlertenable: value for the UserAlert field
-          popUpEnable: value for the Popup field
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=ETWS&SerialNo={}&'
-            'Primary=ON&PrimaryMessageID={}&Secondary=ON&SecondaryMessageID={}'
-            '&WarningType={}&wm={}&UserAlert={}&Popup={}&dcs=0x10&LanguageCode=en"'
-        ).format(serialNo, messageID, messageID, warningType, warningMessage,
-                 userAlertenable, popUpEnable)
-        self.send_command(cmd)
-
-    def send_cmas_etws_cdma1x(self, message_id, service_category, alert_ext,
-                              response_type, severity, urgency, certainty):
-        """ Sends a CMAS/ETWS message on CDMA 1X
-
-        Args:
-          service_category: service category of the alert
-          message_id: message ID
-          alert_ext: warning message text
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP2,"BtsNo=1&ServiceCategory={}&MessageID={}&AlertText={}&'
-            'CharSet=ASCII&ResponseType={}&Severity={}&Urgency={}&Certainty={}"'
-        ).format(service_category, message_id, alert_ext, response_type,
-                 severity, urgency, certainty)
-        self.send_command(cmd)
-
-    @property
-    def csfb_type(self):
-        """ Gets the current CSFB type
-
-        Args:
-            None
-
-        Returns:
-            current CSFB type
-        """
-        return self.send_query("SIMMODELEX? CSFB")
-
-    @csfb_type.setter
-    def csfb_type(self, csfb_type):
-        """ sets the CSFB type
-        Args:
-            csfb_type: CSFB type
-
-        Returns:
-            None
-        """
-        if not isinstance(csfb_type, CsfbType):
-            raise ValueError('The parameter should be of type "CsfbType" ')
-        cmd = "SIMMODELEX CSFB," + csfb_type.value
-        self.send_command(cmd)
-
-    @property
-    def csfb_return_to_eutran(self):
-        """ Gets the current return to EUTRAN status
-
-        Args:
-            None
-
-        Returns:
-            current return to EUTRAN status
-        """
-        return self.send_query("SIMMODELEX? RETEUTRAN")
-
-    @csfb_return_to_eutran.setter
-    def csfb_return_to_eutran(self, enable):
-        """ sets the return to EUTRAN feature
-        Args:
-            enable: enable/disable return to EUTRAN feature
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, ReturnToEUTRAN):
-            raise ValueError(
-                'The parameter should be of type "ReturnToEUTRAN"')
-        cmd = "SIMMODELEX RETEUTRAN," + enable.value
-        self.send_command(cmd)
-
-    def set_packet_preservation(self):
-        """ Set packet state to Preservation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET PRESERVATION"
-        self.send_command(cmd)
-
-    def set_packet_dormant(self):
-        """ Set packet state to Dormant
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET DORMANT"
-        self.send_command(cmd)
-
-    def get_ue_identity(self, identity_type):
-        """ Get the UE identity IMSI, IMEI, IMEISV
-
-        Args:
-            identity_type : IMSI/IMEI/IMEISV
-
-        Returns:
-            IMSI/IMEI/IMEISV value
-        """
-        bts, rat = self.get_camping_cell()
-        if rat == BtsTechnology.LTE.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_LTE.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_LTE
-            else:
-                return None
-        elif rat == BtsTechnology.WCDMA.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_WCDMA.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_WCDMA
-            else:
-                return None
-        elif rat == BtsTechnology.GSM.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_GSM.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_GSM
-            else:
-                return None
-        else:
-            return None
-
-        self.send_command("TMMESSAGEMODE {},USERDATA".format(identity_request))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMUSERDATA {}, {}, {}".format(
-            identity_request, userdata, IDENTITY_REQ_DATA_LEN))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMSENDUSERMSG {}".format(identity_request))
-        time.sleep(WAIT_TIME_IDENTITY_RESPONSE)
-        # Go through sequence log and find the identity response message
-        target = '"{}"'.format(identity_type.value)
-        seqlog = self.send_query("SEQLOG?").split(",")
-        while (target not in seqlog):
-            index = int(seqlog[0]) - 1
-            if index < SEQ_LOG_MESSAGE_START_INDEX:
-                self.log.error("Can not find " + target)
-                return None
-            seqlog = self.send_query("SEQLOG? %d" % index).split(",")
-        return (seqlog[-1])
-
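# --- Editorial sketch (not part of the deleted file) -------------------------
# The per-RAT branching in get_ue_identity() above can be expressed as a lookup
# table built from the constants at the top of this file. USERDATA_BY_RAT is a
# hypothetical name; only the LTE entry is spelled out here, WCDMA and GSM
# follow the same shape.
USERDATA_BY_RAT = {
    BtsTechnology.LTE.value: (TriggerMessageIDs.IDENTITY_REQUEST_LTE.value, {
        UEIdentityType.IMSI: IMSI_READ_USERDATA_LTE,
        UEIdentityType.IMEI: IMEI_READ_USERDATA_LTE,
        UEIdentityType.IMEISV: IMEISV_READ_USERDATA_LTE,
    }),
}

identity_request, userdata_map = USERDATA_BY_RAT[BtsTechnology.LTE.value]
userdata = userdata_map[UEIdentityType.IMSI]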
-    def trigger_ue_capability_enquiry(self, requested_bands):
-        """ Triggers LTE RRC UE capability enquiry from callbox.
-
-        Args:
-            requested_bands: User data in hex format
-        """
-        self.set_trigger_message_mode(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-        self.set_data_of_trigger_message(
-            TriggerMessageIDs.UE_CAPABILITY_ENQUIRY, requested_bands)
-        time.sleep(SETTLING_TIME)
-        self.send_trigger_message(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-
-    def get_measured_pusch_power(self):
-        """ Queries the PUSCH power.
-
-        Returns:
-            A string indicating PUSCH power in each input port.
-        """
-        return self.send_query("MONITOR? UL_PUSCH")
-
-    def select_usim(self, usim):
-        """ Select pre-defined Anritsu USIM models
-
-        Args:
-            usim: any of P0035Bx, P0135Ax, P0250Ax, P0260Ax
-
-        Returns:
-            None
-        """
-        cmd = "SELECTUSIM {}".format(usim)
-        self.send_command(cmd)
-
-
-class _AnritsuTestCases(object):
-    '''Class to interact with the MD8475 supported test procedures '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def procedure(self):
-        """ Gets the current Test Procedure type
-
-        Args:
-            None
-
-        Returns:
-            One of TestProcedure type values
-        """
-        return self._anritsu.send_query("TESTPROCEDURE?")
-
-    @procedure.setter
-    def procedure(self, procedure):
-        """ sets the Test Procedure type
-        Args:
-            procedure: One of TestProcedure type values
-
-        Returns:
-            None
-        """
-        if not isinstance(procedure, TestProcedure):
-            raise ValueError(
-                'The parameter should be of type "TestProcedure" ')
-        cmd = "TESTPROCEDURE " + procedure.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bts_direction(self):
-        """ Gets the current Test direction
-
-         Args:
-            None
-
-        Returns:
-            Current Test direction eg:BTS2,BTS1
-        """
-        return self._anritsu.send_query("TESTBTSDIRECTION?")
-
-    @bts_direction.setter
-    def bts_direction(self, direction):
-        """ Sets the Test direction, e.g. BTS1 to BTS2
-
-        Args:
-            direction: tuple (from-bts,to_bts) of type BtsNumber
-
-        Returns:
-            None
-        """
-        if not isinstance(direction, tuple) or len(direction) != 2:
-            raise ValueError("Pass a tuple with two items")
-        from_bts, to_bts = direction
-        if (isinstance(from_bts, BtsNumber) and isinstance(to_bts, BtsNumber)):
-            cmd = "TESTBTSDIRECTION {},{}".format(from_bts.value, to_bts.value)
-            self._anritsu.send_command(cmd)
-        else:
-            raise ValueError(' The parameters should be of type "BtsNumber" ')
-
-    @property
-    def registration_timeout(self):
-        """ Gets the current Test registration timeout
-
-        Args:
-            None
-
-        Returns:
-            Current test registration timeout value
-        """
-        return self._anritsu.send_query("TESTREGISTRATIONTIMEOUT?")
-
-    @registration_timeout.setter
-    def registration_timeout(self, timeout_value):
-        """ sets the Test registration timeout value
-        Args:
-            timeout_value: test registration timeout value
-
-        Returns:
-            None
-        """
-        cmd = "TESTREGISTRATIONTIMEOUT " + str(timeout_value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def power_control(self):
-        """ Gets the power control enabled/disabled status for test case
-
-        Args:
-            None
-
-        Returns:
-            current power control enabled/disabled status
-        """
-        return self._anritsu.send_query("TESTPOWERCONTROL?")
-
-    @power_control.setter
-    def power_control(self, enable):
-        """ Sets the power control enabled/disabled status for test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestPowerControl):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestPowerControl" ')
-        cmd = "TESTPOWERCONTROL " + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_LTE(self):
-        """ Checks measurement status for LTE test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? LTE")
-
-    @measurement_LTE.setter
-    def measurement_LTE(self, enable):
-        """ Sets the measurement enabled/disabled status for LTE test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT LTE," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_WCDMA(self):
-        """ Checks measurement status for WCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? WCDMA")
-
-    @measurement_WCDMA.setter
-    def measurement_WCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for WCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT WCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_TDSCDMA(self):
-        """ Checks measurement status for TDSCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? TDSCDMA")
-
-    @measurement_TDSCDMA.setter
-    def measurement_TDSCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for TDSCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT TDSCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    def set_pdn_targeteps(self, pdn_order, pdn_number=1):
-        """ Sets PDN to connect as a target when performing the
-           test case for packet handover
-
-        Args:
-            pdn_order:  PRIORITY/USER
-            pdn_number: Target PDN number
-
-        Returns:
-            None
-        """
-        cmd = "TESTPDNTARGETEPS " + pdn_order
-        if pdn_order == "USER":
-            cmd = cmd + "," + str(pdn_number)
-        self._anritsu.send_command(cmd)
-
-
-class _BaseTransceiverStation(object):
-    '''Class to interact with the different BTS supported by MD8475 '''
-    def __init__(self, anritsu, btsnumber):
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        self._bts_number = btsnumber.value
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def output_level(self):
-        """ Gets the Downlink power of the cell
-
-        Args:
-            None
-
-        Returns:
-            DL Power level
-        """
-        cmd = "OLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @output_level.setter
-    def output_level(self, level):
-        """ Sets the Downlink power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.output_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set output level in 3 tries!")
-            cmd = "OLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def input_level(self):
-        """ Gets the reference power of the cell
-
-        Args:
-            None
-
-        Returns:
-            Reference Power level
-        """
-        cmd = "RFLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @input_level.setter
-    def input_level(self, level):
-        """ Sets the reference power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.input_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set intput level in 3 tries!")
-            cmd = "RFLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def band(self):
-        """ Gets the Band of the cell
-
-        Args:
-            None
-
-        Returns:
-            Cell band
-        """
-        cmd = "BAND? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @band.setter
-    def band(self, band):
-        """ Sets the Band of the cell
-
-        Args:
-            band: Band of the cell
-
-        Returns:
-            None
-        """
-        cmd = "BAND {},{}".format(band, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def transmode(self):
-        """ Gets the Transmission Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Transmission mode
-        """
-        cmd = "TRANSMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """ Sets the TM of the cell
-
-        Args:
-            TM: TM of the cell
-
-        Returns:
-            None
-        """
-        cmd = "TRANSMODE {},{}".format(tm_mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def duplex_mode(self):
-        """ Gets the Duplex Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Duplex mode
-        """
-        cmd = "DUPLEXMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """ Sets the duplex mode for the cell
-
-        Args:
-            mode: string indicating FDD or TDD
-
-        Returns:
-            None
-        """
-        cmd = "DUPLEXMODE {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """ Gets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            None
-
-        Returns:
-            Configuration number
-        """
-        cmd = "ULDLCONFIGURATION? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, configuration):
-        """ Sets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            configuration: configuration number, [ 0, 6 ] inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: Frame structure has to be [ 0, 6 ] inclusive
-        """
-        if configuration not in range(0, 7):
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6 inclusive")
-
-        cmd = "ULDLCONFIGURATION {},{}".format(configuration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cfi(self):
-        """ Gets the Control Format Indicator for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The CFI number.
-        """
-        cmd = "CFI? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cfi.setter
-    def cfi(self, cfi):
-        """ Sets the Control Format Indicator for this base station.
-
-        Args:
-            cfi: one of BESTEFFORT, AUTO, 1, 2 or 3.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if cfi's value is invalid
-        """
-
-        cfi = str(cfi)
-
-        valid_values = {'BESTEFFORT', 'AUTO', '1', '2', '3'}
-        if cfi not in valid_values:
-            raise ValueError('Valid values for CFI are %r' % valid_values)
-
-        cmd = "CFI {},{}".format(cfi, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def paging_duration(self):
-        """ Gets the paging cycle duration for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The paging cycle duration in milliseconds.
-        """
-        cmd = "PCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @paging_duration.setter
-    def paging_duration(self, duration):
-        """ Sets the paging cycle duration for this base station.
-
-        Args:
-            duration: the paging cycle duration in milliseconds.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if duration's value is invalid
-        """
-
-        duration = int(duration)
-
-        valid_values = {320, 640, 1280, 2560}
-        if duration not in valid_values:
-            raise ValueError('Valid values for the paging cycle duration are '
-                             '%r.' % valid_values)
-
-        cmd = "PCYCLE {},{}".format(duration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def phich_resource(self):
-        """ Gets the PHICH Resource setting for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The PHICH Resource setting.
-        """
-        cmd = "PHICHRESOURCE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @phich_resource.setter
-    def phich_resource(self, phich):
-        """ Sets the PHICH Resource setting for this base station.
-
-        Args:
-            phich: one of 1/6, 1/2, 1, 2.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if phich's value is invalid
-        """
-
-        phich = str(phich)
-
-        valid_values = ['1/6', '1/2', '1', '2']
-        if phich not in valid_values:
-            raise ValueError('Valid values for PHICH Resource are %r' %
-                             valid_values)
-
-        cmd = "PHICHRESOURCE {},{}".format(phich, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """ Gets SPECIALSUBFRAME of cell.
-
-        Args:
-            None
-
-        Returns:
-            tdd_special_subframe: integer between 0,9 inclusive
-        """
-        cmd = "SPECIALSUBFRAME? " + self._bts_number
-        tdd_special_subframe = int(self._anritsu.send_query(cmd))
-        return tdd_special_subframe
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, tdd_special_subframe):
-        """ Sets SPECIALSUBFRAME of cell.
-
-        Args:
-            tdd_special_subframe: int between 0,9 inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: tdd_special_subframe has to be between 0,9 inclusive
-        """
-        if tdd_special_subframe not in range(0, 10):
-            raise ValueError("The special subframe config is not [0,9]")
-        cmd = "SPECIALSUBFRAME {},{}".format(tdd_special_subframe,
-                                             self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_antenna(self):
-        """ Gets the DL ANTENNA count of the cell
-
-        Args:
-            None
-
-        Returns:
-            No of DL Antenna
-        """
-        cmd = "ANTENNAS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """ Sets the DL ANTENNA of the cell
-
-        Args:
-            num_antenna: number of DL antennas for the cell
-
-        Returns:
-            None
-        """
-        cmd = "ANTENNAS {},{}".format(num_antenna, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bandwidth(self):
-        """ Gets the channel bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            channel bandwidth
-        """
-        cmd = "BANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """ Sets the channel bandwidth of the cell
-
-        Args:
-            bandwidth: channel bandwidth  of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "BANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_bandwidth(self):
-        """ Gets the downlink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink bandwidth
-        """
-        cmd = "DLBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_bandwidth.setter
-    def dl_bandwidth(self, bandwidth):
-        """ Sets the downlink bandwidth of the cell
-
-        Args:
-            bandwidth: downlink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "DLBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_bandwidth(self):
-        """ Gets the uplink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink bandwidth
-        """
-        cmd = "ULBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_bandwidth.setter
-    def ul_bandwidth(self, bandwidth):
-        """ Sets the uplink bandwidth of the cell
-
-        Args:
-            bandwidth: uplink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(
-                ' The parameter should be of type "BtsBandwidth" ')
-        cmd = "ULBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def packet_rate(self):
-        """ Gets the packet rate of the cell
-
-        Args:
-            None
-
-        Returns:
-            packet rate
-        """
-        cmd = "PACKETRATE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @packet_rate.setter
-    def packet_rate(self, packetrate):
-        """ Sets the packet rate of the cell
-
-        Args:
-            packetrate: packet rate of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(packetrate, BtsPacketRate):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketRate" ')
-        cmd = "PACKETRATE {},{}".format(packetrate.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_windowsize(self):
-        """ Gets the uplink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink window size
-        """
-        cmd = "ULWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_windowsize.setter
-    def ul_windowsize(self, windowsize):
-        """ Sets the uplink window size of the cell
-
-        Args:
-            windowsize: uplink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "ULWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_windowsize(self):
-        """ Gets the downlink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink window size
-        """
-        cmd = "DLWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_windowsize.setter
-    def dl_windowsize(self, windowsize):
-        """ Sets the downlink window size of the cell
-
-        Args:
-            windowsize: downlink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "DLWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def service_state(self):
-        """ Gets the service state of BTS
-
-        Args:
-            None
-
-        Returns:
-            service state IN/OUT
-        """
-        cmd = "OUTOFSERVICE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @service_state.setter
-    def service_state(self, service_state):
-        """ Sets the service state of BTS
-
-        Args:
-            service_state: service state of BTS , IN/OUT
-
-        Returns:
-            None
-        """
-        if not isinstance(service_state, BtsServiceState):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsServiceState" ')
-        cmd = "OUTOFSERVICE {},{}".format(service_state.value,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_barred(self):
-        """ Gets the Cell Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsCellBarred value
-        """
-        cmd = "CELLBARRED?" + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_barred.setter
-    def cell_barred(self, barred_option):
-        """ Sets the Cell Barred state of the cell
-
-        Args:
-            barred_option: Cell Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsCellBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsCellBarred" ')
-        cmd = "CELLBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def accessclass_barred(self):
-        """ Gets the Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsAccessClassBarred value
-        """
-        cmd = "ACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @accessclass_barred.setter
-    def accessclass_barred(self, barred_option):
-        """ Sets the Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsAccessClassBarred" ')
-        cmd = "ACBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lteemergency_ac_barred(self):
-        """ Gets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsLteEmergencyAccessClassBarred value
-        """
-        cmd = "LTEEMERGENCYACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lteemergency_ac_barred.setter
-    def lteemergency_ac_barred(self, barred_option):
-        """ Sets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsLteEmergencyAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsLteEmergencyAccessClassBarred" ')
-        cmd = "LTEEMERGENCYACBARRED {},{}".format(barred_option.value,
-                                                  self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mcc(self):
-        """ Gets the MCC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MCC of the cell
-        """
-        cmd = "MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mcc.setter
-    def mcc(self, mcc_code):
-        """ Sets the MCC of the cell
-
-        Args:
-            mcc_code: MCC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MCC {},{}".format(mcc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mnc(self):
-        """ Gets the MNC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MNC of the cell
-        """
-        cmd = "MNC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mnc.setter
-    def mnc(self, mnc_code):
-        """ Sets the MNC of the cell
-
-        Args:
-            mnc_code: MNC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MNC {},{}".format(mnc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname_enable(self):
-        """ Gets the network full name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWFNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname_enable.setter
-    def nw_fullname_enable(self, enable):
-        """ Sets the network full name enable status
-
-        Args:
-            enable: network full name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWFNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname(self):
-        """ Gets the network full name
-
-        Args:
-            None
-
-        Returns:
-            Network full name
-        """
-        cmd = "NWFNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname.setter
-    def nw_fullname(self, fullname):
-        """ Sets the network full name
-
-        Args:
-            fullname: network full name
-
-        Returns:
-            None
-        """
-        cmd = "NWFNAME {},{}".format(fullname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname_enable(self):
-        """ Gets the network short name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWSNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname_enable.setter
-    def nw_shortname_enable(self, enable):
-        """ Sets the network short name enable status
-
-        Args:
-            enable: network short name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWSNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname(self):
-        """ Gets the network short name
-
-        Args:
-            None
-
-        Returns:
-            Network short name
-        """
-        cmd = "NWSNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname.setter
-    def nw_shortname(self, shortname):
-        """ Sets the network short name
-
-        Args:
-            shortname: network short name
-
-        Returns:
-            None
-        """
-        cmd = "NWSNAME {},{}".format(shortname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def apply_parameter_changes(self):
-        """ apply the parameter changes at run time
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "APPLYPARAM"
-        self._anritsu.send_command(cmd)
-
-    @property
-    def wcdma_ctch(self):
-        """ Gets the WCDMA CTCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CTCHSetup values
-        """
-        cmd = "CTCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @wcdma_ctch.setter
-    def wcdma_ctch(self, enable):
-        """ Sets the WCDMA CTCH enable/disable status
-
-        Args:
-            enable: WCDMA CTCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CTCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lac(self):
-        """ Gets the Location Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            LAC value
-        """
-        cmd = "LAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lac.setter
-    def lac(self, lac):
-        """ Sets the Location Area Code of the cell
-
-        Args:
-            lac: Location Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "LAC {},{}".format(lac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def rac(self):
-        """ Gets the Routing Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            RAC value
-        """
-        cmd = "RAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @rac.setter
-    def rac(self, rac):
-        """ Sets the Routing Area Code of the cell
-
-        Args:
-            rac: Routing Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "RAC {},{}".format(rac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_channel(self):
-        """ Gets the downlink channel number of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink channel number
-        """
-        cmd = "DLCHAN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """ Sets the downlink channel number of the cell
-
-        Args:
-            channel: downlink channel number of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLCHAN {},{}".format(channel, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_cc_enabled(self):
-        """ Checks if component carrier is enabled or disabled
-
-        Args:
-            None
-
-        Returns:
-            True if enabled, False if disabled
-        """
-        return (self._anritsu.send_query("TESTDLCC?" +
-                                         self._bts_number) == "ENABLE")
-
-    @dl_cc_enabled.setter
-    def dl_cc_enabled(self, enabled):
-        """ Enables or disables the component carrier
-
-        Args:
-            enabled: True if it should be enabled, False if disabled
-
-        Returns:
-            None
-        """
-        cmd = "TESTDLCC {},{}".format("ENABLE" if enabled else "DISABLE",
-                                      self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_mcc(self):
-        """ Gets the sector 1 MCC of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 mcc
-        """
-        cmd = "S1MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_mcc.setter
-    def sector1_mcc(self, mcc):
-        """ Sets the sector 1 MCC of the CDMA cell
-
-        Args:
-            mcc: sector 1 MCC of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1MCC {},{}".format(mcc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_sid(self):
-        """ Gets the sector 1 system ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 system Id
-        """
-        cmd = "S1SID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_sid.setter
-    def sector1_sid(self, sid):
-        """ Sets the sector 1 system ID of the CDMA cell
-
-        Args:
-            sid: sector 1 system ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_nid(self):
-        """ Gets the sector 1 network ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 network Id
-        """
-        cmd = "S1NID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_nid.setter
-    def sector1_nid(self, nid):
-        """ Sets the sector 1 network ID of the CDMA cell
-
-        Args:
-            nid: sector 1 network ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1NID {},{}".format(nid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_baseid(self):
-        """ Gets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 Base Id
-        """
-        cmd = "S1BASEID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_baseid.setter
-    def sector1_baseid(self, baseid):
-        """ Sets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            baseid: sector 1 Base ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1BASEID {},{}".format(baseid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_latitude(self):
-        """ Gets the sector 1 latitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 latitude
-        """
-        cmd = "S1LATITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_latitude.setter
-    def sector1_latitude(self, latitude):
-        """ Sets the sector 1 latitude of the CDMA cell
-
-        Args:
-            latitude: sector 1 latitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LATITUDE {},{}".format(latitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_longitude(self):
-        """ Gets the sector 1 longitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 longitude
-        """
-        cmd = "S1LONGITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_longitude.setter
-    def sector1_longitude(self, longitude):
-        """ Sets the sector 1 longitude of the CDMA cell
-
-        Args:
-            longitude: sector 1 longitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LONGITUDE {},{}".format(longitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def evdo_sid(self):
-        """ Gets the Sector ID of the EVDO cell
-
-        Args:
-            None
-
-        Returns:
-            Sector Id
-        """
-        cmd = "S1SECTORID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @evdo_sid.setter
-    def evdo_sid(self, sid):
-        """ Sets the Sector ID of the EVDO cell
-
-        Args:
-            sid: Sector ID of the EVDO cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SECTORID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_id(self):
-        """ Gets the cell identity of the cell
-
-        Args:
-            None
-
-        Returns:
-            cell identity
-        """
-        cmd = "CELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_id.setter
-    def cell_id(self, cell_id):
-        """ Sets the cell identity of the cell
-
-        Args:
-            cell_id: cell identity of the cell
-
-        Returns:
-            None
-        """
-        cmd = "CELLID {},{}".format(cell_id, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def physical_cellid(self):
-        """ Gets the physical cell id of the cell
-
-        Args:
-            None
-
-        Returns:
-            physical cell id
-        """
-        cmd = "PHYCELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @physical_cellid.setter
-    def physical_cellid(self, physical_cellid):
-        """ Sets the physical cell id of the cell
-
-        Args:
-            physical_cellid: physical cell id of the cell
-
-        Returns:
-            None
-        """
-        cmd = "PHYCELLID {},{}".format(physical_cellid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_dl.setter
-    def gsm_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "DLMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_ul.setter
-    def gsm_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            mcs_ul: Modulation and Coding scheme (UL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "ULMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_scheduling_mode(self):
-        """ Gets the Scheduling mode of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Scheduling mode
-        """
-        cmd = "SCHEDULEMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_scheduling_mode.setter
-    def lte_scheduling_mode(self, mode):
-        """ Sets the Scheduling mode of the LTE cell
-
-        Args:
-            mode: STATIC (default) or DYNAMIC
-
-        Returns:
-            None
-        """
-        counter = 1
-        while mode != self.lte_scheduling_mode:
-            if counter > 3:
-                raise AnritsuError("Fail to set scheduling mode in 3 tries!")
-            cmd = "SCHEDULEMODE {},{}".format(mode, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def tbs_pattern(self):
-        """ Gets the TBS Pattern setting for the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            TBS Pattern setting
-        """
-        cmd = "TBSPATTERN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tbs_pattern.setter
-    def tbs_pattern(self, pattern):
-        """ Sets the TBS Pattern setting for the LTE cell
-
-        Args:
-            mode: "FULLALLOCATION" or "OFF"
-
-        Returns:
-            None
-        """
-        cmd = "TBSPATTERN {}, {}".format(pattern, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        cmd = "DRXCONN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: OFF, AUTO, MANUAL
-
-        Returns:
-            None
-        """
-        cmd = "DRXCONN {}, {}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        cmd = "DRXDURATIONTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            time: Amount of PDCCH subframes to wait for user data
-                to be transmitted
-
-        Returns:
-            None
-        """
-        cmd = "DRXDURATIONTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        cmd = "DRXINACTIVITYTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            time: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        cmd = "DRXINACTIVITYTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        cmd = "DRXRETRANSTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        cmd = "DRXRETRANSTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        cmd = "DRXLONGCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            time: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        cmd = "DRXLONGCYCLE SF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET {}, {}".format(offset, self._bts_number)
-        self._anritsu.send_command(cmd)
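-
-    # Example (sketch): one way to drive the connected-mode DRX parameters
-    # above, assuming `bts` is an instance of this class; the values are
-    # illustrative and must match what the instrument accepts:
-    #
-    #     bts.drx_connected_mode = "MANUAL"
-    #     bts.drx_on_duration_timer = 4
-    #     bts.drx_inactivity_timer = 10
-    #     bts.drx_long_cycle = 320
-    #     bts.drx_long_cycle_offset = 0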
-
-    @property
-    def lte_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_dl.setter
-    def lte_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "DLIMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_ul.setter
-    def lte_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            mcs_ul: Modulation and Coding scheme (UL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "ULIMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_dl_modulation_order(self):
-        """ Gets the DL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The DL modulation order
-        """
-        cmd = "DLRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_dl_modulation_order.setter
-    def lte_dl_modulation_order(self, order):
-        """ Sets the DL modulation order of the LTE cell
-
-        Args:
-            order: the DL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "DLRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_ul_modulation_order(self):
-        """ Gets the UL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The UL modulation order
-        """
-        cmd = "ULRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_ul_modulation_order.setter
-    def lte_ul_modulation_order(self, order):
-        """ Sets the UL modulation order of the LTE cell
-
-        Args:
-            order: the UL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "ULRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_dl(self):
-        """ Gets the Downlink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            Downlink NRB
-        """
-        cmd = "DLNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_dl.setter
-    def nrb_dl(self, blocks):
-        """ Sets the Downlink N Resource Block of the cell
-
-        Args:
-            blocks: Downlink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_ul(self):
-        """ Gets the uplink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink NRB
-        """
-        cmd = "ULNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_ul.setter
-    def nrb_ul(self, blocks):
-        """ Sets the uplink N Resource Block of the cell
-
-        Args:
-            blocks: uplink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ULNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def max_nrb_ul(self):
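-        """ Gets the maximum number of uplink resource blocks allowed by the
-        current uplink bandwidth (or the downlink bandwidth when the uplink
-        is set to SAMEASDL).
-        """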
-        ul_bandwidth = self.ul_bandwidth
-        if ul_bandwidth == 'SAMEASDL':
-            ul_bandwidth = self.dl_bandwidth
-        max_nrb = MAX_NRB_FOR_BANDWIDTH.get(ul_bandwidth, None)
-        if not max_nrb:
-            raise ValueError('Could not get maximum RB allocation '
-                             'for bandwidth: {}'.format(ul_bandwidth))
-        return max_nrb
-
-    @property
-    def mimo_support(self):
-        """ Gets the maximum supported MIMO mode for the LTE bases tation.
-
-        Returns:
-            the MIMO mode as a string
-        """
-        cmd = "LTEMIMO? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mimo_support.setter
-    def mimo_support(self, mode):
-        """ Sets the maximum supported MIMO mode for the LTE base station.
-
-        Args:
-            mode: a string or an object of the LteMimoMode class.
-        """
-
-        if isinstance(mode, LteMimoMode):
-            mode = mode.value
-
-        cmd = "LTEMIMO {},{}".format(self._bts_number, mode)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def neighbor_cell_mode(self):
-        """ Gets the neighbor cell mode
-
-        Args:
-            None
-
-        Returns:
-            current neighbor cell mode
-        """
-        cmd = "NCLIST? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @neighbor_cell_mode.setter
-    def neighbor_cell_mode(self, mode):
-        """ Sets the neighbor cell mode
-
-        Args:
-            mode: neighbor cell mode , DEFAULT/ USERDATA
-
-        Returns:
-            None
-        """
-        cmd = "NCLIST {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_type(self, system, index):
-        """ Gets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell type
-        """
-        cmd = "NCTYPE? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_type(self, system, index, cell_type):
-        """ Sets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            cell_type: cell type
-                BTS1, BTS2, BTS3, BTS4,CELLNAME, DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "NCTYPE {},{},{},{}".format(system, index, cell_type,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
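-
-    # Example (sketch): advertising BTS2 as an LTE neighbor of this cell,
-    # assuming `bts` is an instance of this class:
-    #
-    #     bts.neighbor_cell_mode = "USERDATA"
-    #     bts.set_neighbor_cell_type("LTE", 1, "BTS2")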
-
-    def get_neighbor_cell_name(self, system, index):
-        """ Gets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell name
-        """
-        cmd = "NCCELLNAME? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_name(self, system, index, name):
-        """ Sets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "NCCELLNAME {},{},{},{}".format(system, index, name,
-                                              self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_mcc(self, system, index):
-        """ Gets the neighbor cell mcc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mcc
-        """
-        cmd = "NCMCC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_mnc(self, system, index):
-        """ Gets the neighbor cell mnc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mnc
-        """
-        cmd = "NCMNC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_id(self, system, index):
-        """ Gets the neighbor cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell id
-        """
-        cmd = "NCCELLID? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_tac(self, system, index):
-        """ Gets the neighbor cell tracking area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking area code
-        """
-        cmd = "NCTAC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_channel(self, system, index):
-        """ Gets the neighbor cell downlink channel
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell downlink channel
-        """
-        cmd = "NCDLCHAN? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_bandwidth(self, system, index):
-        """ Gets the neighbor cell downlink bandwidth
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell downlink bandwidth
-        """
-        cmd = "NCDLBANDWIDTH {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_pcid(self, system, index):
-        """ Gets the neighbor cell physical cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell physical cell id
-        """
-        cmd = "NCPHYCELLID {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_lac(self, system, index):
-        """ Gets the neighbor cell location area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell location area code
-        """
-        cmd = "NCLAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_rac(self, system, index):
-        """ Gets the neighbor cell routing area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell routing area code
-        """
-        cmd = "NCRAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @property
-    def primary_scrambling_code(self):
-        """ Gets the primary scrambling code for WCDMA cell
-
-        Args:
-            None
-
-        Returns:
-            primary scrambling code
-        """
-        cmd = "PRISCRCODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_scrambling_code.setter
-    def primary_scrambling_code(self, psc):
-        """ Sets the primary scrambling code for WCDMA cell
-
-        Args:
-            psc: primary scrambling code
-
-        Returns:
-            None
-        """
-        cmd = "PRISCRCODE {},{}".format(psc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tac(self):
-        """ Gets the Tracking Area Code of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Tracking Area Code of the LTE cell
-        """
-        cmd = "TAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tac.setter
-    def tac(self, tac):
-        """ Sets the Tracking Area Code of the LTE cell
-
-        Args:
-            tac: Tracking Area Code of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "TAC {},{}".format(tac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell(self):
-        """ Gets the current cell for BTS
-
-        Args:
-            None
-
-        Returns:
-            current cell for BTS
-        """
-        cmd = "CELLSEL? {}".format(self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @cell.setter
-    def cell(self, cell_name):
-        """ sets the  cell for BTS
-        Args:
-            cell_name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "CELLSEL {},{}".format(self._bts_number, cell_name)
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_cbch(self):
-        """ Gets the GSM CBCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CBCHSetup values
-        """
-        cmd = "CBCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_cbch.setter
-    def gsm_cbch(self, enable):
-        """ Sets the GSM CBCH enable/disable status
-
-        Args:
-            enable: GSM CBCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CBCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_gprs_mode(self):
-        """ Gets the GSM connection mode
-
-        Args:
-            None
-
-        Returns:
-            A string indicating if connection is EGPRS, GPRS or non-GPRS
-        """
-        cmd = "GPRS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_gprs_mode.setter
-    def gsm_gprs_mode(self, mode):
-        """ Sets the GPRS connection mode
-
-        Args:
-            mode: GPRS connection mode
-
-        Returns:
-            None
-        """
-
-        if not isinstance(mode, BtsGprsMode):
-            raise ValueError(' The parameter should be of type "BtsGprsMode"')
-        cmd = "GPRS {},{}".format(mode.value, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_slots(self):
-        """ Gets the GSM slot assignment
-
-        Args:
-            None
-
-        Returns:
-            A tuple indicating DL and UL slots.
-        """
-
-        cmd = "MLTSLTCFG? " + self._bts_number
-
-        response = self._anritsu.send_query(cmd)
-        split_response = response.split(',')
-
-        if len(split_response) != 2:
-            raise ValueError(response)
-
-        return split_response[0], split_response[1]
-
-    @gsm_slots.setter
-    def gsm_slots(self, slots):
-        """ Sets the number of downlink / uplink slots for GSM
-
-        Args:
-            slots: a tuple containing two ints indicating (DL,UL)
-
-        Returns:
-            None
-        """
-
-        try:
-            dl, ul = slots
-            dl = int(dl)
-            ul = int(ul)
-        except (TypeError, ValueError):
-            raise ValueError(
-                'The parameter slot has to be a tuple containing two ints '
-                'indicating (dl,ul) slots.')
-
-        # Validate
-        if dl < 1 or ul < 1 or dl + ul > 5:
-            raise ValueError(
-                'DL and UL slots have to be >= 1 and the sum <= 5.')
-
-        cmd = "MLTSLTCFG {},{},{}".format(dl, ul, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
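-# Example usage (sketch): configuring basic parameters on the primary BTS.
-# This assumes the MD8475A wrapper defined earlier in this file exposes a
-# `get_BTS` accessor and that `BtsNumber.BTS1` names the first base station.
-#
-#     bts1 = anritsu.get_BTS(BtsNumber.BTS1)
-#     bts1.band = 4
-#     bts1.output_level = -30
-#     bts1.apply_parameter_changes()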
-
-class _VirtualPhone(object):
-    '''Class to interact with virtual phone supported by MD8475 '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def id(self):
-        """ Gets the virtual phone ID
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPID? "
-        return self._anritsu.send_query(cmd)
-
-    @id.setter
-    def id(self, phonenumber):
-        """ Sets the virtual phone ID
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPID {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def id_c2k(self):
-        """ Gets the virtual phone ID for CDMA 1x
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPIDC2K? "
-        return self._anritsu.send_query(cmd)
-
-    @id_c2k.setter
-    def id_c2k(self, phonenumber):
-        """ Sets the virtual phone ID for CDMA 1x
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPIDC2K {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def auto_answer(self):
-        """ Gets the auto answer status of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            auto answer status, ON/OFF
-        """
-        cmd = "VPAUTOANSWER? "
-        return self._anritsu.send_query(cmd)
-
-    @auto_answer.setter
-    def auto_answer(self, option):
-        """ Sets the auto answer feature
-
-        Args:
-            option: tuple with two items for turning on Auto Answer
-                    (OFF or (ON, timetowait))
-
-        Returns:
-            None
-        """
-        enable = "OFF"
-        time = 5
-
-        try:
-            enable, time = option
-        except ValueError:
-            if enable != "OFF":
-                raise ValueError("Pass a tuple with two items for"
-                                 " Turning on Auto Answer")
-        cmd = "VPAUTOANSWER {},{}".format(enable.value, time)
-        self._anritsu.send_command(cmd)
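-
-    # Example (hedged): enabling auto answer with a 5 second wait, assuming
-    # `vp` is this _VirtualPhone instance; any value whose string (or .value)
-    # is "ON" works for the first tuple item:
-    #
-    #   vp.auto_answer = ("ON", 5)
-    #   vp.auto_answer = "OFF"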
-
-    @property
-    def calling_mode(self):
-        """ Gets the calling mode of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            calling mode of virtual phone
-        """
-        cmd = "VPCALLINGMODE? "
-        return self._anritsu.send_query(cmd)
-
-    @calling_mode.setter
-    def calling_mode(self, calling_mode):
-        """ Sets the calling mode of virtual phone
-
-        Args:
-            calling_mode: calling mode of virtual phone
-
-        Returns:
-            None
-        """
-        cmd = "VPCALLINGMODE {}".format(calling_mode)
-        self._anritsu.send_command(cmd)
-
-    def set_voice_off_hook(self):
-        """ Set the virtual phone operating mode to Voice Off Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 0"
-        return self._anritsu.send_command(cmd)
-
-    def set_voice_on_hook(self):
-        """ Set the virtual phone operating mode to Voice On Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 1"
-        return self._anritsu.send_command(cmd)
-
-    def set_video_off_hook(self):
-        """ Set the virtual phone operating mode to Video Off Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 2"
-        return self._anritsu.send_command(cmd)
-
-    def set_video_on_hook(self):
-        """ Set the virtual phone operating mode to Video On Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 3"
-        return self._anritsu.send_command(cmd)
-
-    def set_call_waiting(self):
-        """ Set the virtual phone operating mode to Call waiting
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 4"
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def status(self):
-        """ Gets the virtual phone status
-
-        Args:
-            None
-
-        Returns:
-            virtual phone status
-        """
-        cmd = "VPSTAT?"
-        status = self._anritsu.send_query(cmd)
-        return _VP_STATUS[status]
-
-    def sendSms(self, phoneNumber, message):
-        """ Sends the SMS data from Anritsu to UE
-
-        Args:
-            phoneNumber: sender of SMS
-            message: message text
-
-        Returns:
-            None
-        """
-        cmd = ("SENDSMS /?PhoneNumber=001122334455&Sender={}&Text={}"
-               "&DCS=00").format(phoneNumber, AnritsuUtils.gsm_encode(message))
-        return self._anritsu.send_command(cmd)
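-
-    # Example (hedged): delivering a mobile-terminated SMS to the UE, assuming
-    # `vp` is this _VirtualPhone instance; the sender number is illustrative:
-    #
-    #   vp.sendSms("8888888888", "test message")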
-
-    def sendSms_c2k(self, phoneNumber, message):
-        """ Sends the SMS data from Anritsu to UE (in CDMA)
-
-        Args:
-            phoneNumber: sender of SMS
-            message: message text
-
-        Returns:
-            None
-        """
-        cmd = ("C2KSENDSMS System=CDMA\&Originating_Address={}\&UserData={}"
-               ).format(phoneNumber, AnritsuUtils.cdma_encode(message))
-        return self._anritsu.send_command(cmd)
-
-    def receiveSms(self):
-        """ Receives SMS messages sent by the UE in an external application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        return self._anritsu.send_query("RECEIVESMS?")
-
-    def receiveSms_c2k(self):
-        """ Receives SMS messages sent by the UE(in CDMA) in an external application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        return self._anritsu.send_query("C2KRECEIVESMS?")
-
-    def setSmsStatusReport(self, status):
-        """ Set the Status Report value of the SMS
-
-        Args:
-            status: status code
-
-        Returns:
-            None
-        """
-        cmd = "SMSSTATUSREPORT {}".format(status)
-        return self._anritsu.send_command(cmd)
-
-
-class _PacketDataNetwork(object):
-    '''Class to configure PDN parameters'''
-    def __init__(self, anritsu, pdnnumber):
-        self._pdn_number = pdnnumber
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    # Default Gateway Selection
-    @property
-    def pdn_DG_selection(self):
-        """ Gets the default gateway for the PDN
-
-        Args:
-          None
-
-        Returns:
-          Default gateway selection for the PDN (COMMON or USER)
-        """
-        cmd = "PDNDEFAULTGATEWAY? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_DG_selection.setter
-    def pdn_DG_selection(self, selection):
-        """ Sets the default gateway selection for the PDN
-
-        Args:
-          Selection: COMMON or USER
-
-        Returns:
-          None
-        """
-        cmd = "PDNDEFAULTGATEWAY {},{}".format(self._pdn_number, selection)
-        self._anritsu.send_command(cmd)
-
-    # PDN specific Default Gateway:
-    @property
-    def pdn_gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            IPv4 address of the default gateway
-        """
-        cmd = "PDNDGIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_gateway_ipv4addr.setter
-    def pdn_gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "PDNDGIPV4 {},{}".format(self._pdn_number, ipv4_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            IPv6 address of the default gateway
-        """
-        cmd = "PDNDGIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_gateway_ipv6addr.setter
-    def pdn_gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "PDNDGIPV6 {},{}".format(self._pdn_number, ipv6_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_iptype(self):
-        """ Gets IP type of UE for particular PDN
-
-        Args:
-            None
-
-        Returns:
-            IP type of UE for particular PDN
-        """
-        cmd = "PDNIPTYPE? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_iptype.setter
-    def ue_address_iptype(self, ip_type):
-        """ Set IP type of UE for particular PDN
-
-        Args:
-            ip_type: IP type of UE
-
-        Returns:
-            None
-        """
-        if not isinstance(ip_type, IPAddressType):
-            raise ValueError(
-                ' The parameter should be of type "IPAddressType"')
-        cmd = "PDNIPTYPE {},{}".format(self._pdn_number, ip_type.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_ipv4(self):
-        """ Gets UE IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            UE IPv4 address
-        """
-        cmd = "PDNIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_ipv4.setter
-    def ue_address_ipv4(self, ip_address):
-        """ Set UE IPv4 address
-
-        Args:
-            ip_address: UE IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNIPV4 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_ipv6(self):
-        """ Gets UE IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            UE IPv6 address
-        """
-        cmd = "PDNIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_ipv6.setter
-    def ue_address_ipv6(self, ip_address):
-        """ Set UE IPv6 address
-
-        Args:
-            ip_address: UE IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def primary_dns_address_ipv4(self):
-        """ Gets Primary DNS server IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            Primary DNS server IPv4 address
-        """
-        cmd = "PDNDNSIPV4PRI? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_dns_address_ipv4.setter
-    def primary_dns_address_ipv4(self, ip_address):
-        """ Set Primary DNS server IPv4 address
-
-        Args:
-            ip_address: Primary DNS server IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV4PRI {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def secondary_dns_address_ipv4(self):
-        """ Gets secondary DNS server IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            secondary DNS server IPv4 address
-        """
-        cmd = "PDNDNSIPV4SEC? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @secondary_dns_address_ipv4.setter
-    def secondary_dns_address_ipv4(self, ip_address):
-        """ Set secondary DNS server IPv4 address
-
-        Args:
-            ip_address: secondary DNS server IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV4SEC {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dns_address_ipv6(self):
-        """ Gets DNS server IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            DNS server IPv6 address
-        """
-        cmd = "PDNDNSIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @dns_address_ipv6.setter
-    def dns_address_ipv6(self, ip_address):
-        """ Set DNS server IPv6 address
-
-        Args:
-            ip_address: DNS server IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv4(self):
-        """ Gets Secondary P-CSCF IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            Secondary P-CSCF IPv4 address
-        """
-        cmd = "PDNPCSCFIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv4.setter
-    def cscf_address_ipv4(self, ip_address):
-        """ Set Secondary P-CSCF IPv4 address
-
-        Args:
-            ip_address: Secondary P-CSCF IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNPCSCFIPV4 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv6(self):
-        """ Gets P-CSCF IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            P-CSCF IPv6 address
-        """
-        cmd = "PDNPCSCFIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv6.setter
-    def cscf_address_ipv6(self, ip_address):
-        """ Set P-CSCF IPv6 address
-
-        Args:
-            ip_address: P-CSCF IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNPCSCFIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_ims(self):
-        """ Get PDN IMS VNID binding status
-
-        Args:
-            None
-
-        Returns:
-            PDN IMS VNID binding status
-        """
-        cmd = "PDNIMS? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_ims.setter
-    def pdn_ims(self, switch):
-        """ Set PDN IMS VNID binding Enable/Disable
-
-        Args:
-            switch: "ENABLE/DISABLE"
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type'
-                             ' "Switch", ie, ENABLE or DISABLE ')
-        cmd = "PDNIMS {},{}".format(self._pdn_number, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_vnid(self):
-        """ Get PDN IMS VNID
-
-        Args:
-            None
-
-        Returns:
-            PDN IMS VNID
-        """
-        cmd = "PDNVNID? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_vnid.setter
-    def pdn_vnid(self, vnid):
-        """ Set PDN IMS VNID
-
-        Args:
-            vnid: 1~99
-
-        Returns:
-            None
-        """
-        cmd = "PDNVNID {},{}".format(self._pdn_number, vnid)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_apn_name(self):
-        """ Get PDN APN NAME
-
-        Args:
-            None
-
-        Returns:
-            PDN APN NAME
-        """
-        cmd = "PDNCHECKAPN? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_apn_name.setter
-    def pdn_apn_name(self, name):
-        """ Set PDN APN NAME
-
-        Args:
-            name: fast.t-mobile.com, ims
-
-        Returns:
-            None
-        """
-        cmd = "PDNCHECKAPN {},{}".format(self._pdn_number, name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_qci(self):
-        """ Get PDN QCI Value
-
-        Args:
-            None
-
-        Returns:
-            PDN QCI Value
-        """
-        cmd = "PDNQCIDEFAULT? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_qci.setter
-    def pdn_qci(self, qci_value):
-        """ Set PDN QCI Value
-
-        Args:
-            qci_value: 5, 9
-
-        Returns:
-            None
-        """
-        cmd = "PDNQCIDEFAULT {},{}".format(self._pdn_number, qci_value)
-        self._anritsu.send_command(cmd)
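-
-    # Example (hedged): a minimal PDN setup sketch, assuming `pdn` is a
-    # _PacketDataNetwork instance obtained from the MD8475A controller; the
-    # enum member names and addresses below are illustrative:
-    #
-    #   pdn.ue_address_iptype = IPAddressType.IPV4V6
-    #   pdn.ue_address_ipv4 = "192.168.1.2"
-    #   pdn.pdn_ims = Switch.ENABLE
-    #   pdn.pdn_vnid = 1
-    #   pdn.pdn_apn_name = "ims"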
-
-
-class _TriggerMessage(object):
-    '''Class to interact with trigger message handling supported by MD8475 '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    def set_reply_type(self, message_id, reply_type):
-        """ Sets the reply type of the trigger information
-
-        Args:
-            message_id: trigger information message Id
-            reply_type: reply type of the trigger information
-
-        Returns:
-            None
-        """
-        if not isinstance(message_id, TriggerMessageIDs):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageIDs"')
-        if not isinstance(reply_type, TriggerMessageReply):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageReply"')
-
-        cmd = "REJECTTYPE {},{}".format(message_id.value, reply_type.value)
-        self._anritsu.send_command(cmd)
-
-    def set_reject_cause(self, message_id, cause):
-        """ Sets the reject cause of the trigger information
-
-        Args:
-            message_id: trigger information message Id
-            cause: cause for reject
-
-        Returns:
-            None
-        """
-        if not isinstance(message_id, TriggerMessageIDs):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageIDs"')
-
-        cmd = "REJECTCAUSE {},{}".format(message_id.value, cause)
-        self._anritsu.send_command(cmd)
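-
-    # Example (hedged): forcing a reject of a trigger message, assuming `tm` is
-    # this _TriggerMessage instance; the enum member names are assumptions:
-    #
-    #   tm.set_reply_type(TriggerMessageIDs.ATTACH_REQ, TriggerMessageReply.REJECT)
-    #   tm.set_reject_cause(TriggerMessageIDs.ATTACH_REQ, 3)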
-
-
-class _IMS_Services(object):
-    '''Class to configure and operate IMS Services'''
-    def __init__(self, anritsu, vnid):
-        self._vnid = vnid
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def sync(self):
-        """ Gets Sync Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID Sync Enable status
-        """
-        cmd = "IMSSYNCENABLE? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @sync.setter
-    def sync(self, switch):
-        """ Set Sync Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSSYNCENABLE {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv4(self):
-        """ Gets CSCF IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            CSCF IPv4 address
-        """
-        cmd = "IMSCSCFIPV4? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv4.setter
-    def cscf_address_ipv4(self, ip_address):
-        """ Set CSCF IPv4 address
-
-        Args:
-            ip_address: CSCF IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPV4 {},{}".format(self._vnid, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv6(self):
-        """ Gets CSCF IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            CSCF IPv6 address
-        """
-        cmd = "IMSCSCFIPV6? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv6.setter
-    def cscf_address_ipv6(self, ip_address):
-        """ Set CSCF IPv6 address
-
-        Args:
-            ip_address: CSCF IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPV6 {},{}".format(self._vnid, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def imscscf_iptype(self):
-        """ Gets CSCF IP Type
-
-        Args:
-            None
-
-        Returns:
-            CSCF IP Type
-        """
-        cmd = "IMSCSCFIPTYPE? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @imscscf_iptype.setter
-    def imscscf_iptype(self, iptype):
-        """ Set CSCF IP Type
-
-        Args:
-            iptype: IPV4, IPV6, IPV4V6
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPTYPE {},{}".format(self._vnid, iptype)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_monitoring_ua(self):
-        """ Get CSCF Monitoring UA URI
-
-        Args:
-            None
-
-        Returns:
-            CSCF Monitoring UA URI
-        """
-        cmd = "IMSCSCFUAURI? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_monitoring_ua.setter
-    def cscf_monitoring_ua(self, ua_uri):
-        """ Set CSCF Monitoring UA URI
-
-        Args:
-            ua_uri: CSCF Monitoring UA URI
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUAURI {},{}".format(self._vnid, ua_uri)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_host_name(self):
-        """ Get CSCF Host Name
-
-        Args:
-            None
-
-        Returns:
-            CSCF Host Name
-        """
-        cmd = "IMSCSCFNAME? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_host_name.setter
-    def cscf_host_name(self, host_name):
-        """ Set CSCF Host Name
-
-        Args:
-            host_name: CSCF Host Name
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFNAME {},{}".format(self._vnid, host_name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_ims_authentication(self):
-        """ Get CSCF IMS Auth Value
-
-        Args:
-            None
-
-        Returns:
-            CSCF IMS Auth
-        """
-        cmd = "IMSCSCFAUTH? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_ims_authentication.setter
-    def cscf_ims_authentication(self, on_off):
-        """ Set CSCF IMS Auth Value
-
-        Args:
-            on_off: CSCF IMS Auth ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFAUTH {},{}".format(self._vnid, on_off)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_precondition(self):
-        """ Get CSCF IMS Precondition
-
-        Args:
-            None
-
-        Returns:
-            CSCF IMS Precondition
-        """
-        cmd = "IMSCSCFPRECONDITION? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_precondition.setter
-    def cscf_precondition(self, on_off):
-        """ Set CSCF IMS Precondition
-
-        Args:
-            on_off: CSCF IMS Precondition ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFPRECONDITION {},{}".format(self._vnid, on_off)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_virtual_ua(self):
-        """ Get CSCF Virtual UA URI
-
-        Args:
-            None
-
-        Returns:
-            CSCF Virtual UA URI
-        """
-        cmd = "IMSCSCFVUAURI? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_virtual_ua.setter
-    def cscf_virtual_ua(self, ua_uri):
-        """ Set CSCF Virtual UA URI
-
-        Args:
-            ua_uri: CSCF Virtual UA URI
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFVUAURI {},{}".format(self._vnid, ua_uri)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tmo_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @tmo_cscf_userslist_add.setter
-    def tmo_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},00112233445566778899AABBCCDDEEFF,TS34108,AKAV1_MD5,\
-        OPC,00000000000000000000000000000000,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def fi_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @fi_cscf_userslist_add.setter
-    def fi_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},00112233445566778899AABBCCDDEEFF,TS34108,AKAV1_MD5,\
-        OPC,00000000000000000000000000000000,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def vzw_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @vzw_cscf_userslist_add.setter
-    def vzw_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},465B5CE8B199B49FAA5F0A2EE238A6BC,MILENAGE,AKAV1_MD5,\
-        OP,5F1D289C5D354D0A140C2548F5F3E3BA,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dns(self):
-        """ Gets DNS Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID DNS Enable status
-        """
-        cmd = "IMSDNS? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @dns.setter
-    def dns(self, switch):
-        """ Set DNS Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSDNS {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ndp_nic(self):
-        """ Gets NDP Network Interface name
-
-        Args:
-            None
-
-        Returns:
-            NDP NIC name
-        """
-        cmd = "IMSNDPNIC? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @ndp_nic.setter
-    def ndp_nic(self, nic_name):
-        """ Set NDP Network Interface name
-
-        Args:
-            nic_name: NDP Network Interface name
-
-        Returns:
-            None
-        """
-        cmd = "IMSNDPNIC {},{}".format(self._vnid, nic_name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ndp_prefix(self):
-        """ Gets NDP IPv6 Prefix
-
-        Args:
-            None
-
-        Returns:
-            NDP IPv6 Prefix
-        """
-        cmd = "IMSNDPPREFIX? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @ndp_prefix.setter
-    def ndp_prefix(self, prefix_addr):
-        """ Set NDP IPv6 Prefix
-
-        Args:
-            prefix_addr: NDP IPV6 Prefix Addr
-
-        Returns:
-            None
-        """
-        cmd = "IMSNDPPREFIX {},{},64".format(self._vnid, prefix_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def psap(self):
-        """ Gets PSAP Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID PSAP Enable status
-        """
-        cmd = "IMSPSAP? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @psap.setter
-    def psap(self, switch):
-        """ Set PSAP Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSPSAP {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def psap_auto_answer(self):
-        """ Gets PSAP Auto Answer status
-
-        Args:
-            None
-
-        Returns:
-            VNID PSAP Auto Answer status
-        """
-        cmd = "IMSPSAPAUTOANSWER? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @psap_auto_answer.setter
-    def psap_auto_answer(self, switch):
-        """ Set PSAP Auto Answer Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSPSAPAUTOANSWER {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    def start_virtual_network(self):
-        """ Start the specified Virtual Network (IMS service)
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "IMSSTARTVN " + self._vnid
-        return self._anritsu.send_command(cmd)
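-
-    # Example (hedged): bringing up a basic IMS virtual network, assuming `ims`
-    # is the _IMS_Services instance for this VNID; the address is illustrative:
-    #
-    #   ims.sync = Switch.ENABLE
-    #   ims.cscf_address_ipv4 = "192.168.137.1"
-    #   ims.start_virtual_network()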
diff --git a/src/antlion/controllers/anritsu_lib/mg3710a.py b/src/antlion/controllers/anritsu_lib/mg3710a.py
deleted file mode 100644
index 9d6c559..0000000
--- a/src/antlion/controllers/anritsu_lib/mg3710a.py
+++ /dev/null
@@ -1,715 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signal Generator MG3710A.
-"""
-
-import logging
-import socket
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\n"
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MG3710A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class MG3710A(object):
-    """Class to communicate with Anritsu Signal Generator MG3710A.
-       This uses GPIB commands to interface with the Anritsu MG3710A."""
-
-    def __init__(self, ip_address):
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signal Generator MG3710A ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, 49158),
-                                                  timeout=30)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication Signal Generator MG3710A OK.")
-            self.log.info("Opened Socket connection to ({})"
-                          "with handle ({})".format(self._ipaddr, self._sock))
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while conencting to"
-                               " Anritsu MG3710A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
-    def disconnect(self):
-        """ Disconnect Signal Generator MG3710A
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command(":SYST:COMM:GTL", opc=False)
-        self._sock.close()
-
-    def send_query(self, query, sock_timeout=10):
-        """ Sends a Query message to Anritsu MG3710A and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(256).rstrip(TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=30, opc=True):
-        """ Sends a Command message to Anritsu MG3710A
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        cmdToSend = (command + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(cmdToSend)
-            if opc:
-                # check operation status
-                status = self.send_query("*OPC?")
-                if int(status) != OPERATION_COMPLETE:
-                    raise AnritsuError("Operation not completed")
-        except socket.timeout:
-            raise AnritsuError("Timeout for Command Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error for Anritsu command")
-        return
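-
-    # Example (hedged): raw usage once connected; the IP address and command
-    # strings below are illustrative:
-    #
-    #   mg = MG3710A("192.168.1.10")
-    #   idn = mg.send_query("*IDN?")
-    #   mg.send_command("OUTP1 ON")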
-
-    @property
-    def sg(self):
-        """ Gets current selected signal generator(SG)
-
-        Args:
-            None
-
-        Returns:
-            selected signal generator number
-        """
-        return self.send_query("PORT?")
-
-    @sg.setter
-    def sg(self, sg_number):
-        """ Selects the signal generator to be controlled
-
-        Args:
-            sg_number: sg number 1 | 2
-
-        Returns:
-            None
-        """
-        cmd = "PORT {}".format(sg_number)
-        self.send_command(cmd)
-
-    def get_modulation_state(self, sg=1):
-        """ Gets the RF signal modulation state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            modulation state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("OUTP{}:MOD?".format(sg))
-
-    def set_modulation_state(self, state, sg=1):
-        """ Sets the RF signal modulation state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "OUTP{}:MOD {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_rf_output_state(self, sg=1):
-        """ Gets RF signal output state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            RF signal output state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("OUTP{}?".format(sg))
-
-    def set_rf_output_state(self, state, sg=1):
-        """ Sets the RF signal output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "OUTP{} {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency(self, sg=1):
-        """ Gets the selected frequency of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            selected frequency
-        """
-        return self.send_query("SOUR{}:FREQ?".format(sg))
-
-    def set_frequency(self, freq, sg=1):
-        """ Sets the frequency of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            freq : frequency
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ {}".format(sg, freq)
-        self.send_command(cmd)
-
-    def get_frequency_offset_state(self, sg=1):
-        """ Gets the Frequency Offset enable state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Frequency Offset enable state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:OFFS:STAT?".format(sg))
-
-    def set_frequency_offset_state(self, state, sg=1):
-        """ Sets the Frequency Offset enable state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state, ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:OFFS:STAT {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency_offset(self, sg=1):
-        """ Gets the current frequency offset value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current frequency offset value
-        """
-        return self.send_query("SOUR{}:FREQ:OFFS?".format(sg))
-
-    def set_frequency_offset(self, offset, sg=1):
-        """ Sets the frequency offset value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            offset : frequency offset value
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:OFFS {}".format(sg, offset)
-        self.send_command(cmd)
-
-    def get_frequency_offset_multiplier_state(self, sg=1):
-        """ Gets the Frequency Offset multiplier enable state (ON/OFF) of
-            signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Frequency Offset multiplier enable state. 0 (OFF) | 1 (ON)
-        """
-        return self.send_query("SOUR{}:FREQ:MULT:STAT?".format(sg))
-
-    def set_frequency_offset_multiplier_state(self, state, sg=1):
-        """ Sets the  Frequency Offset multiplier enable state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state, ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:MULT:STAT {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency_offset_multiplier(self, sg=1):
-        """ Gets the current frequency offset multiplier value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset multiplier value
-        """
-        return self.send_query("SOUR{}:FREQ:MULT?".format(sg))
-
-    def set_frequency_offset_multiplier(self, multiplier, sg=1):
-        """ Sets the frequency offset multiplier value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            multiplier : frequency offset multiplier value
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:MULT {}".format(sg, multiplier)
-        self.send_command(cmd)
-
-    def get_channel(self, sg=1):
-        """ Gets the current channel number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current channel number
-        """
-        return self.send_query("SOUR{}:FREQ:CHAN:NUMB?".format(sg))
-
-    def set_channel(self, channel, sg=1):
-        """ Sets the channel number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            channel : channel number
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:CHAN:NUMB {}".format(sg, channel)
-        self.send_command(cmd)
-
-    def get_channel_group(self, sg=1):
-        """ Gets the current channel group number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current channel group number
-        """
-        return self.send_query("SOUR{}:FREQ:CHAN:GRO?".format(sg))
-
-    def set_channel_group(self, group, sg=1):
-        """ Sets the channel group number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            group : channel group number
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:CHAN:GRO {}".format(sg, group)
-        self.send_command(cmd)
-
-    def get_rf_output_level(self, sg=1):
-        """ Gets the current RF output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current RF output level
-        """
-        return self.send_query("SOUR{}:POW:CURR?".format(sg))
-
-    def get_output_level_unit(self, sg=1):
-        """ Gets the current RF output level unit
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current RF output level unit
-        """
-        return self.send_query("UNIT{}:POW?".format(sg))
-
-    def set_output_level_unit(self, unit, sg=1):
-        """ Sets the RF output level unit
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            unit : Output level unit
-
-        Returns:
-            None
-        """
-        cmd = "UNIT{}:POW {}".format(sg, unit)
-        self.send_command(cmd)
-
-    def get_output_level(self, sg=1):
-        """ Gets the Output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Output level
-        """
-        return self.send_query("SOUR{}:POW?".format(sg))
-
-    def set_output_level(self, level, sg=1):
-        """ Sets the Output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            level : Output level
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:POW {}".format(sg, level)
-        self.send_command(cmd)
-
-    def get_arb_state(self, sg=1):
-        """ Gets the ARB function state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            ARB function state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:RAD:ARB?".format(sg))
-
-    def set_arb_state(self, state, sg=1):
-        """ Sets the ARB function state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state (ON/OFF)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB {}".format(sg, state)
-        self.send_command(cmd)
-
-    def restart_arb_waveform_pattern(self, sg=1):
-        """ playback the waveform pattern from the beginning.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WAV:REST".format(sg)
-        self.send_command(cmd)
-
-    def load_waveform(self, package_name, pattern_name, memory, sg=1):
-        """ loads the waveform from HDD to specified memory
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            package_name : Package name of signal
-            pattern_name : Pattern name of signal
-            memory: memory for the signal - "A" or "B"
-
-        Returns:
-            None
-        """
-        cmd = "MMEM{}:LOAD:WAV:WM{} '{}','{}'".format(sg, memory, package_name,
-                                                      pattern_name)
-        self.send_command(cmd)
-
-    def select_waveform(self, package_name, pattern_name, memory, sg=1):
-        """ Selects the waveform to output on specified memory
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            package_name : Package name of signal
-            pattern_name : Pattern name of signal
-            memory: memory for the signal - "A" or "B"
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:WAV '{}','{}'".format(
-            sg, memory, package_name, pattern_name)
-        self.send_command(cmd)
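-
-    # Example (hedged): loading and playing a waveform on SG1, memory A; the
-    # package and pattern names below are illustrative:
-    #
-    #   mg.load_waveform("LTE_FDD", "E-TM_1-1_10M", "A", sg=1)
-    #   mg.select_waveform("LTE_FDD", "E-TM_1-1_10M", "A", sg=1)
-    #   mg.set_arb_state("ON", sg=1)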
-
-    def get_freq_relative_display_status(self, sg=1):
-        """ Gets the frequency relative display status
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency relative display status.   0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:REF:STAT?".format(sg))
-
-    def set_freq_relative_display_status(self, enable, sg=1):
-        """ Sets frequency relative display status
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            enable : enable type (ON/OFF)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:REF:STAT {}".format(sg, enable)
-        self.send_command(cmd)
-
-    def get_freq_channel_display_type(self, sg=1):
-        """ Gets the selected type(frequency/channel) for input display
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            selected type (frequency/channel) for input display
-        """
-        return self.send_query("SOUR{}:FREQ:TYPE?".format(sg))
-
-    def set_freq_channel_display_type(self, freq_channel, sg=1):
-        """ Sets thes type(frequency/channel) for input display
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            freq_channel : display type (frequency/channel)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:TYPE {}".format(sg, freq_channel)
-        self.send_command(cmd)
-
-    def get_arb_combination_mode(self, sg=1):
-        """ Gets the current mode to generate the pattern
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current mode to generate the pattern
-        """
-        return self.send_query("SOUR{}:RAD:ARB:PCOM?".format(sg))
-
-    def set_arb_combination_mode(self, mode, sg=1):
-        """ Sets the mode to generate the pattern
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            mode : pattern generation mode
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:PCOM {}".format(sg, mode)
-        self.send_command(cmd)
-
-    def get_arb_pattern_aorb_state(self, a_or_b, sg=1):
-        """ Gets the Pattern A/B output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Pattern A or Pattern B ("A" or "B")
-
-        Returns:
-            Pattern A/B output state . 0(OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:OUTP?".format(a_or_b, sg))
-
-    def set_arb_pattern_aorb_state(self, a_or_b, state, sg=1):
-        """ Sets the Pattern A/B output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Pattern A or Pattern B ("A" or "B")
-            state : output state
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:OUTP {}".format(sg, a_or_b, state)
-        self.send_command(cmd)
-
-    def get_arb_level_aorb(self, a_or_b, sg=1):
-        """ Gets the Pattern A/B output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Pattern A or Pattern B ("A" or "B")
-
-        Returns:
-             Pattern A/B output level
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:POW?".format(sg, a_or_b))
-
-    def set_arb_level_aorb(self, a_or_b, level, sg=1):
-        """ Sets the Pattern A/B output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Pattern A or Pattern B ("A" or "B")
-            level : output level
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:POW {}".format(sg, a_or_b, level)
-        self.send_command(cmd)
-
-    def get_arb_freq_offset(self, sg=1):
-        """ Gets the frequency offset between Pattern A and Patten B
-            when CenterSignal is A or B.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset between Pattern A and Pattern B
-        """
-        return self.send_query("SOUR{}:RAD:ARB:FREQ:OFFS?".format(sg))
-
-    def set_arb_freq_offset(self, offset, sg=1):
-        """ Sets the frequency offset between Pattern A and Patten B when
-            CenterSignal is A or B.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            offset : frequency offset
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:FREQ:OFFS {}".format(sg, offset)
-        self.send_command(cmd)
-
-    def get_arb_freq_offset_aorb(self, a_or_b, sg=1):
-        """ Gets the frequency offset of Pattern A/Pattern B based on Baseband
-            center frequency
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Pattern A or Pattern B ("A" or "B")
-
-        Returns:
-            frequency offset
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:FREQ:OFFS?".format(
-            sg, a_or_b))
-
-    def set_arb_freq_offset_aorb(self, a_or_b, offset, sg=1):
-        """ Sets the frequency offset of Pattern A/Pattern B based on Baseband
-            center frequency
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Pattern A or Pattern B ("A" or "B")
-            offset : frequency offset
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:FREQ:OFFS {}".format(sg, a_or_b, offset)
-        self.send_command(cmd)
diff --git a/src/antlion/controllers/ap_lib/ap_get_interface.py b/src/antlion/controllers/ap_lib/ap_get_interface.py
index ddd6987..74a6d2c 100644
--- a/src/antlion/controllers/ap_lib/ap_get_interface.py
+++ b/src/antlion/controllers/ap_lib/ap_get_interface.py
@@ -15,11 +15,17 @@
 # limitations under the License.
 
 import logging
+
+from typing import List, Optional, Tuple, TYPE_CHECKING
+
 from antlion.libs.proc import job
 
-GET_ALL_INTERFACE = 'ls /sys/class/net'
-GET_VIRTUAL_INTERFACE = 'ls /sys/devices/virtual/net'
-BRCTL_SHOW = 'brctl show'
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
+
+GET_ALL_INTERFACE = "ls /sys/class/net"
+GET_VIRTUAL_INTERFACE = "ls /sys/devices/virtual/net"
+BRCTL_SHOW = "brctl show"
 
 
 class ApInterfacesError(Exception):
@@ -27,10 +33,11 @@
 
 
 class ApInterfaces(object):
-    """Class to get network interface information for the device.
+    """Class to get network interface information for the device."""
 
-    """
-    def __init__(self, ap, wan_interface_override=None):
+    def __init__(
+        self, ap: "AccessPoint", wan_interface_override: Optional[str] = None
+    ) -> None:
         """Initialize the ApInterface class.
 
         Args:
@@ -40,29 +47,29 @@
         self.ssh = ap.ssh
         self.wan_interface_override = wan_interface_override
 
-    def get_all_interface(self):
+    def get_all_interface(self) -> List[str]:
         """Get all network interfaces on the device.
 
         Returns:
             interfaces_all: list of all the network interfaces on device
         """
         output = self.ssh.run(GET_ALL_INTERFACE)
-        interfaces_all = output.stdout.split('\n')
+        interfaces_all = output.stdout.split("\n")
 
         return interfaces_all
 
-    def get_virtual_interface(self):
+    def get_virtual_interface(self) -> List[str]:
         """Get all virtual interfaces on the device.
 
         Returns:
             interfaces_virtual: list of all the virtual interfaces on device
         """
         output = self.ssh.run(GET_VIRTUAL_INTERFACE)
-        interfaces_virtual = output.stdout.split('\n')
+        interfaces_virtual = output.stdout.split("\n")
 
         return interfaces_virtual
 
-    def get_physical_interface(self):
+    def get_physical_interface(self) -> List[str]:
         """Get all the physical interfaces of the device.
 
         Get all physical interfaces such as eth ports and wlan ports
@@ -75,7 +82,7 @@
 
         return interfaces_phy
 
-    def get_bridge_interface(self):
+    def get_bridge_interface(self) -> Optional[List[str]]:
         """Get all the bridge interfaces of the device.
 
         Returns:
@@ -85,17 +92,16 @@
         interfaces_bridge = []
         try:
             output = self.ssh.run(BRCTL_SHOW)
-            lines = output.stdout.split('\n')
+            lines = output.stdout.split("\n")
             for line in lines:
-                interfaces_bridge.append(line.split('\t')[0])
+                interfaces_bridge.append(line.split("\t")[0])
             interfaces_bridge.pop(0)
-            interfaces_bridge = [x for x in interfaces_bridge if x != '']
-            return interfaces_bridge
+            return [x for x in interfaces_bridge if x != ""]
         except job.Error:
-            logging.info('No brctl utility is available')
+            logging.info("No brctl utility is available")
             return None
 
-    def get_wlan_interface(self):
+    def get_wlan_interface(self) -> Tuple[str, str]:
         """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
 
         Returns:
@@ -107,21 +113,18 @@
         wlan_5g = None
         interfaces_phy = self.get_physical_interface()
         for iface in interfaces_phy:
-            IW_LIST_FREQ = 'iwlist %s freq' % iface
-            output = self.ssh.run(IW_LIST_FREQ)
-            if 'Channel 06' in output.stdout and 'Channel 36' not in output.stdout:
+            output = self.ssh.run(f"iwlist {iface} freq")
+            if "Channel 06" in output.stdout and "Channel 36" not in output.stdout:
                 wlan_2g = iface
-            elif 'Channel 36' in output.stdout and 'Channel 06' not in output.stdout:
+            elif "Channel 36" in output.stdout and "Channel 06" not in output.stdout:
                 wlan_5g = iface
 
-        interfaces_wlan = [wlan_2g, wlan_5g]
+        if wlan_2g is None or wlan_5g is None:
+            raise ApInterfacesError("Missing at least one WLAN interface")
 
-        if None not in interfaces_wlan:
-            return interfaces_wlan
+        return (wlan_2g, wlan_5g)
 
-        raise ApInterfacesError('Missing at least one WLAN interface')
-
-    def get_wan_interface(self):
+    def get_wan_interface(self) -> str:
         """Get the WAN interface which has internet connectivity. If a wan
         interface is already specified return that instead.
 
@@ -145,13 +148,13 @@
         if wan:
             return wan
 
-        output = self.ssh.run('ifconfig')
-        interfaces_all = output.stdout.split('\n')
-        logging.info("IFCONFIG output = %s" % interfaces_all)
+        output = self.ssh.run("ifconfig")
+        interfaces_all = output.stdout.split("\n")
+        logging.info(f"IFCONFIG output = {interfaces_all}")
 
-        raise ApInterfacesError('No WAN interface available')
+        raise ApInterfacesError("No WAN interface available")
 
-    def get_lan_interface(self):
+    def get_lan_interface(self) -> Optional[str]:
         """Get the LAN interface connecting to local devices.
 
         Returns:
@@ -165,14 +168,13 @@
         interface_wan = self.get_wan_interface()
         interfaces_eth.remove(interface_wan)
         for iface in interfaces_eth:
-            LAN_CHECK = 'ifconfig %s' % iface
-            output = self.ssh.run(LAN_CHECK)
-            if 'RUNNING' in output.stdout:
+            output = self.ssh.run(f"ifconfig {iface}")
+            if "RUNNING" in output.stdout:
                 lan = iface
                 break
         return lan
 
-    def check_ping(self, iface):
+    def check_ping(self, iface: str) -> int:
         """Check the ping status on specific interface to determine the WAN.
 
         Args:
@@ -180,9 +182,8 @@
         Returns:
             network_status: the connectivity status of the interface
         """
-        PING = 'ping -c 3 -I %s 8.8.8.8' % iface
         try:
-            self.ssh.run(PING)
+            self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8")
             return 1
         except job.Error:
             return 0
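+
+    # Example (hedged): typical lookup flow, assuming `ap` is an AccessPoint:
+    #
+    #   interfaces = ApInterfaces(ap)
+    #   wlan_2g, wlan_5g = interfaces.get_wlan_interface()
+    #   wan = interfaces.get_wan_interface()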
diff --git a/src/antlion/controllers/ap_lib/ap_iwconfig.py b/src/antlion/controllers/ap_lib/ap_iwconfig.py
index 550f785..225a397 100644
--- a/src/antlion/controllers/ap_lib/ap_iwconfig.py
+++ b/src/antlion/controllers/ap_lib/ap_iwconfig.py
@@ -14,7 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion.libs.proc import job
+from typing import Optional, TYPE_CHECKING
+
+from antlion.libs.proc.job import Result
+
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
 
 
 class ApIwconfigError(Exception):
@@ -22,13 +27,11 @@
 
 
 class ApIwconfig(object):
-    """Class to configure wireless interface via iwconfig
+    """Class to configure wireless interface via iwconfig"""
 
-    """
+    PROGRAM_FILE = "/usr/local/sbin/iwconfig"
 
-    PROGRAM_FILE = '/usr/local/sbin/iwconfig'
-
-    def __init__(self, ap):
+    def __init__(self, ap: "AccessPoint") -> None:
         """Initialize the ApIwconfig class.
 
         Args:
@@ -36,14 +39,12 @@
         """
         self.ssh = ap.ssh
 
-    def ap_iwconfig(self, interface, arguments=None):
+    def ap_iwconfig(
+        self, interface: str, arguments: Optional[str] = None
+    ) -> Optional[Result]:
         """Configure the wireless interface using iwconfig.
 
         Returns:
             output: the output of the command, if any
         """
-        iwconfig_command = '%s %s %s' % (self.PROGRAM_FILE, interface,
-                                         arguments)
-        output = self.ssh.run(iwconfig_command)
-
-        return output
+        return self.ssh.run(f"{self.PROGRAM_FILE} {interface} {arguments}")
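+
+    # Illustrative usage sketch (the interface name and arguments are examples):
+    #
+    #   iwconfig = ApIwconfig(ap)
+    #   iwconfig.ap_iwconfig("wlan0", "txpower 20")
+    #
+    # Note: when `arguments` is None the rendered command still contains a
+    # literal "None" token, matching the original %-formatting behavior.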
diff --git a/src/antlion/controllers/ap_lib/bridge_interface.py b/src/antlion/controllers/ap_lib/bridge_interface.py
index 2dd82d8..ee4733e 100644
--- a/src/antlion/controllers/ap_lib/bridge_interface.py
+++ b/src/antlion/controllers/ap_lib/bridge_interface.py
@@ -18,17 +18,16 @@
 import time
 from antlion.libs.proc import job
 
-_BRCTL = 'brctl'
-BRIDGE_NAME = 'br-lan'
-CREATE_BRIDGE = '%s addbr %s' % (_BRCTL, BRIDGE_NAME)
-DELETE_BRIDGE = '%s delbr %s' % (_BRCTL, BRIDGE_NAME)
-BRING_DOWN_BRIDGE = 'ifconfig %s down' % BRIDGE_NAME
+_BRCTL = "brctl"
+BRIDGE_NAME = "br-lan"
+CREATE_BRIDGE = "%s addbr %s" % (_BRCTL, BRIDGE_NAME)
+DELETE_BRIDGE = "%s delbr %s" % (_BRCTL, BRIDGE_NAME)
+BRING_DOWN_BRIDGE = "ifconfig %s down" % BRIDGE_NAME
 
 
 class BridgeInterfaceConfigs(object):
-    """Configs needed for creating bridge interface between LAN and WLAN.
+    """Configs needed for creating bridge interface between LAN and WLAN."""
 
-    """
     def __init__(self, iface_wlan, iface_lan, bridge_ip):
         """Set bridge interface configs based on the channel info.
 
@@ -43,9 +42,8 @@
 
 
 class BridgeInterface(object):
-    """Class object for bridge interface betwen WLAN and LAN
+    """Class object for bridge interface betwen WLAN and LAN"""
 
-    """
     def __init__(self, ap):
         """Initialize the BridgeInterface class.
 
@@ -62,40 +60,44 @@
             brconfigs: the bridge interface config, type BridgeInterfaceConfigs
         """
 
-        logging.info('Create bridge interface between LAN and WLAN')
+        logging.info("Create bridge interface between LAN and WLAN")
         # Create the bridge
         try:
             self.ssh.run(CREATE_BRIDGE)
         except job.Error:
             logging.warning(
-                'Bridge interface {} already exists, no action needed'.format(
-                    BRIDGE_NAME))
+                "Bridge interface {} already exists, no action needed".format(
+                    BRIDGE_NAME
+                )
+            )
 
         # Enable 4addr mode on for the wlan interface
-        ENABLE_4ADDR = 'iw dev %s set 4addr on' % (brconfigs.iface_wlan)
+        ENABLE_4ADDR = "iw dev %s set 4addr on" % (brconfigs.iface_wlan)
         try:
             self.ssh.run(ENABLE_4ADDR)
         except job.Error:
-            logging.warning('4addr is already enabled on {}'.format(
-                brconfigs.iface_wlan))
+            logging.warning(
+                "4addr is already enabled on {}".format(brconfigs.iface_wlan)
+            )
 
         # Add both LAN and WLAN interfaces to the bridge interface
         for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
-            ADD_INTERFACE = '%s addif %s %s' % (_BRCTL, BRIDGE_NAME, interface)
+            ADD_INTERFACE = "%s addif %s %s" % (_BRCTL, BRIDGE_NAME, interface)
             try:
                 self.ssh.run(ADD_INTERFACE)
             except job.Error:
-                logging.warning('{} has already been added to {}'.format(
-                    interface, BRIDGE_NAME))
+                logging.warning(
+                    "{} has already been added to {}".format(interface, BRIDGE_NAME)
+                )
         time.sleep(5)
 
         # Set IP address on the bridge interface to bring it up
-        SET_BRIDGE_IP = 'ifconfig %s %s' % (BRIDGE_NAME, brconfigs.bridge_ip)
+        SET_BRIDGE_IP = "ifconfig %s %s" % (BRIDGE_NAME, brconfigs.bridge_ip)
         self.ssh.run(SET_BRIDGE_IP)
         time.sleep(2)
 
         # Bridge interface is up
-        logging.info('Bridge interface is up and running')
+        logging.info("Bridge interface is up and running")
 
     def teardown(self, brconfigs):
         """Tear down the bridge interface.
@@ -103,17 +105,17 @@
         Args:
             brconfigs: the bridge interface config, type BridgeInterfaceConfigs
         """
-        logging.info('Bringing down the bridge interface')
+        logging.info("Bringing down the bridge interface")
         # Delete the bridge interface
         self.ssh.run(BRING_DOWN_BRIDGE)
         time.sleep(1)
         self.ssh.run(DELETE_BRIDGE)
 
         # Bring down wlan interface and disable 4addr mode
-        BRING_DOWN_WLAN = 'ifconfig %s down' % brconfigs.iface_wlan
+        BRING_DOWN_WLAN = "ifconfig %s down" % brconfigs.iface_wlan
         self.ssh.run(BRING_DOWN_WLAN)
         time.sleep(2)
-        DISABLE_4ADDR = 'iw dev %s set 4addr off' % (brconfigs.iface_wlan)
+        DISABLE_4ADDR = "iw dev %s set 4addr off" % (brconfigs.iface_wlan)
         self.ssh.run(DISABLE_4ADDR)
         time.sleep(1)
-        logging.info('Bridge interface is down')
+        logging.info("Bridge interface is down")
diff --git a/src/antlion/controllers/ap_lib/dhcp_config.py b/src/antlion/controllers/ap_lib/dhcp_config.py
index 4cc7adf..a50b6d0 100644
--- a/src/antlion/controllers/ap_lib/dhcp_config.py
+++ b/src/antlion/controllers/ap_lib/dhcp_config.py
@@ -14,7 +14,7 @@
 
 import copy
 
-_ROUTER_DNS = '8.8.8.8, 4.4.4.4'
+_ROUTER_DNS = "8.8.8.8, 4.4.4.4"
 
 
 class Subnet(object):
@@ -30,14 +30,16 @@
         additional_options: A dictionary corresponding to DHCP options.
     """
 
-    def __init__(self,
-                 subnet,
-                 start=None,
-                 end=None,
-                 router=None,
-                 lease_time=None,
-                 additional_parameters={},
-                 additional_options={}):
+    def __init__(
+        self,
+        subnet,
+        start=None,
+        end=None,
+        router=None,
+        lease_time=None,
+        additional_parameters={},
+        additional_options={},
+    ):
         """
         Args:
             subnet: ipaddress.IPv4Network, The address space of the subnetwork
@@ -64,9 +66,9 @@
             self.start = self.network[2]
 
         if not self.start in self.network:
-            raise ValueError('The start range is not in the subnet.')
+            raise ValueError("The start range is not in the subnet.")
         if self.start.is_reserved:
-            raise ValueError('The start of the range cannot be reserved.')
+            raise ValueError("The start of the range cannot be reserved.")
 
         if end:
             self.end = end
@@ -74,18 +76,17 @@
             self.end = self.network[-2]
 
         if not self.end in self.network:
-            raise ValueError('The end range is not in the subnet.')
+            raise ValueError("The end range is not in the subnet.")
         if self.end.is_reserved:
-            raise ValueError('The end of the range cannot be reserved.')
+            raise ValueError("The end of the range cannot be reserved.")
         if self.end < self.start:
-            raise ValueError(
-                'The end must be an address larger than the start.')
+            raise ValueError("The end must be an address larger than the start.")
 
         if router:
             if router >= self.start and router <= self.end:
-                raise ValueError('Router must not be in pool range.')
+                raise ValueError("Router must not be in pool range.")
             if not router in self.network:
-                raise ValueError('Router must be in the given subnet.')
+                raise ValueError("Router must be in the given subnet.")
 
             self.router = router
         else:
@@ -102,13 +103,13 @@
                     break
 
             if not self.router:
-                raise ValueError('No useable host found.')
+                raise ValueError("No useable host found.")
 
         self.lease_time = lease_time
         self.additional_parameters = additional_parameters
         self.additional_options = additional_options
-        if 'domain-name-servers' not in self.additional_options:
-            self.additional_options['domain-name-servers'] = _ROUTER_DNS
+        if "domain-name-servers" not in self.additional_options:
+            self.additional_options["domain-name-servers"] = _ROUTER_DNS
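+
+        # Illustrative construction (addresses are examples); start, end, and
+        # router fall back to defaults derived from the subnet when omitted:
+        #
+        #   import ipaddress
+        #   pool = Subnet(
+        #       ipaddress.ip_network("192.168.9.0/24"),
+        #       start=ipaddress.ip_address("192.168.9.100"),
+        #       end=ipaddress.ip_address("192.168.9.200"),
+        #       router=ipaddress.ip_address("192.168.9.1"),
+        #       lease_time=3600,
+        #   )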
 
 
 class StaticMapping(object):
@@ -137,14 +138,15 @@
         max_lease_time: The max time to allow a lease.
     """
 
-    def __init__(self,
-                 subnets=None,
-                 static_mappings=None,
-                 default_lease_time=600,
-                 max_lease_time=7200):
+    def __init__(
+        self,
+        subnets=None,
+        static_mappings=None,
+        default_lease_time=600,
+        max_lease_time=7200,
+    ):
         self.subnets = copy.deepcopy(subnets) if subnets else []
-        self.static_mappings = (copy.deepcopy(static_mappings)
-                                if static_mappings else [])
+        self.static_mappings = copy.deepcopy(static_mappings) if static_mappings else []
         self.default_lease_time = default_lease_time
         self.max_lease_time = max_lease_time
 
@@ -155,9 +157,9 @@
         lines = []
 
         if self.default_lease_time:
-            lines.append('default-lease-time %d;' % self.default_lease_time)
+            lines.append("default-lease-time %d;" % self.default_lease_time)
         if self.max_lease_time:
-            lines.append('max-lease-time %s;' % self.max_lease_time)
+            lines.append("max-lease-time %s;" % self.max_lease_time)
 
         for subnet in self.subnets:
             address = subnet.network.network_address
@@ -169,35 +171,35 @@
             additional_parameters = subnet.additional_parameters
             additional_options = subnet.additional_options
 
-            lines.append('subnet %s netmask %s {' % (address, mask))
-            lines.append('\tpool {')
-            lines.append('\t\toption subnet-mask %s;' % mask)
-            lines.append('\t\toption routers %s;' % router)
-            lines.append('\t\trange %s %s;' % (start, end))
+            lines.append("subnet %s netmask %s {" % (address, mask))
+            lines.append("\tpool {")
+            lines.append("\t\toption subnet-mask %s;" % mask)
+            lines.append("\t\toption routers %s;" % router)
+            lines.append("\t\trange %s %s;" % (start, end))
             if lease_time:
-                lines.append('\t\tdefault-lease-time %d;' % lease_time)
-                lines.append('\t\tmax-lease-time %d;' % lease_time)
+                lines.append("\t\tdefault-lease-time %d;" % lease_time)
+                lines.append("\t\tmax-lease-time %d;" % lease_time)
             for param, value in additional_parameters.items():
-                lines.append('\t\t%s %s;' % (param, value))
+                lines.append("\t\t%s %s;" % (param, value))
             for option, value in additional_options.items():
-                lines.append('\t\toption %s %s;' % (option, value))
-            lines.append('\t}')
-            lines.append('}')
+                lines.append("\t\toption %s %s;" % (option, value))
+            lines.append("\t}")
+            lines.append("}")
 
         for mapping in self.static_mappings:
             identifier = mapping.identifier
             fixed_address = mapping.ipv4_address
-            host_fake_name = 'host%s' % identifier.replace(':', '')
+            host_fake_name = "host%s" % identifier.replace(":", "")
             lease_time = mapping.lease_time
 
-            lines.append('host %s {' % host_fake_name)
-            lines.append('\thardware ethernet %s;' % identifier)
-            lines.append('\tfixed-address %s;' % fixed_address)
+            lines.append("host %s {" % host_fake_name)
+            lines.append("\thardware ethernet %s;" % identifier)
+            lines.append("\tfixed-address %s;" % fixed_address)
             if lease_time:
-                lines.append('\tdefault-lease-time %d;' % lease_time)
-                lines.append('\tmax-lease-time %d;' % lease_time)
-            lines.append('}')
+                lines.append("\tdefault-lease-time %d;" % lease_time)
+                lines.append("\tmax-lease-time %d;" % lease_time)
+            lines.append("}")
 
-        config_str = '\n'.join(lines)
+        config_str = "\n".join(lines)
 
         return config_str
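+
+    # Illustrative output: for DhcpConfig(subnets=[pool]) with the `pool`
+    # Subnet sketched above, render_config_file() yields roughly:
+    #
+    #   default-lease-time 600;
+    #   max-lease-time 7200;
+    #   subnet 192.168.9.0 netmask 255.255.255.0 {
+    #       pool {
+    #           option subnet-mask 255.255.255.0;
+    #           option routers 192.168.9.1;
+    #           range 192.168.9.100 192.168.9.200;
+    #           default-lease-time 3600;
+    #           max-lease-time 3600;
+    #           option domain-name-servers 8.8.8.8, 4.4.4.4;
+    #       }
+    #   }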
diff --git a/src/antlion/controllers/ap_lib/dhcp_server.py b/src/antlion/controllers/ap_lib/dhcp_server.py
index 01411c6..c52983b 100644
--- a/src/antlion/controllers/ap_lib/dhcp_server.py
+++ b/src/antlion/controllers/ap_lib/dhcp_server.py
@@ -16,6 +16,7 @@
 
 from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
 
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig
 from antlion.controllers.utils_lib.commands import shell
 from antlion import logger
 
@@ -37,9 +38,9 @@
         config: The dhcp server configuration that is being used.
     """
 
-    PROGRAM_FILE = 'dhcpd'
+    PROGRAM_FILE = "dhcpd"
 
-    def __init__(self, runner, interface, working_dir='/tmp'):
+    def __init__(self, runner, interface, working_dir="/tmp"):
         """
         Args:
             runner: Object that has a run_async and run methods for running
@@ -47,33 +48,34 @@
             interface: string, The name of the interface to use.
             working_dir: The directory to work out of.
         """
-        self._log = logger.create_logger(lambda msg: '[DHCP Server|%s] %s' % (
-            interface, msg))
+        self._log = logger.create_logger(lambda msg: f"[DHCP Server|{interface}] {msg}")
+
         self._runner = runner
         self._working_dir = working_dir
         self._shell = shell.ShellCommand(runner, working_dir)
-        self._stdio_log_file = 'dhcpd_%s.log' % interface
-        self._config_file = 'dhcpd_%s.conf' % interface
-        self._lease_file = 'dhcpd_%s.leases' % interface
-        self._pid_file = 'dhcpd_%s.pid' % interface
-        self._identifier = '%s.*%s' % (self.PROGRAM_FILE, self._config_file)
+        self._stdio_log_file = f"dhcpd_{interface}.log"
+        self._config_file = f"dhcpd_{interface}.conf"
+        self._lease_file = f"dhcpd_{interface}.leases"
+        self._pid_file = f"dhcpd_{interface}.pid"
+        self._identifier = f"{self.PROGRAM_FILE}.*{self._config_file}"
 
     # There is a slight timing issue where if the proc filesystem in Linux
     # doesn't get updated in time as when this is called, the NoInterfaceError
     # will happening.  By adding this retry, the error appears to have gone away
     # but will still show a warning if the problem occurs.  The error seems to
     # happen more with bridge interfaces than standard interfaces.
-    @retry(retry=retry_if_exception_type(NoInterfaceError),
-           stop=stop_after_attempt(3),
-           wait=wait_fixed(1))
-    def start(self, config, timeout=60):
+    @retry(
+        retry=retry_if_exception_type(NoInterfaceError),
+        stop=stop_after_attempt(3),
+        wait=wait_fixed(1),
+    )
+    def start(self, config: DhcpConfig, timeout_sec: int = 60) -> None:
         """Starts the dhcp server.
 
         Starts the dhcp server daemon and runs it in the background.
 
         Args:
-            config: dhcp_config.DhcpConfig, Configs to start the dhcp server
-                    with.
+            config: Configs to start the dhcp server with.
 
         Raises:
             Error: Raised when a dhcp server error is found.
@@ -86,20 +88,24 @@
         self._shell.delete_file(self._pid_file)
         self._shell.touch_file(self._lease_file)
 
-        dhcpd_command = '%s -cf "%s" -lf %s -f -pf "%s"' % (
-            self.PROGRAM_FILE, self._config_file, self._lease_file,
-            self._pid_file)
-        base_command = 'cd "%s"; %s' % (self._working_dir, dhcpd_command)
-        job_str = '%s > "%s" 2>&1' % (base_command, self._stdio_log_file)
+        dhcpd_command = (
+            f"{self.PROGRAM_FILE} "
+            f'-cf "{self._config_file}" '
+            f"-lf {self._lease_file} "
+            f'-pf "{self._pid_file}" '
+            "-f -d"
+        )
+
+        base_command = f'cd "{self._working_dir}"; {dhcpd_command}'
+        job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1'
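+        # For example, with interface "eth1" (illustrative) and the default
+        # working directory, job_str expands to:
+        #   cd "/tmp"; dhcpd -cf "dhcpd_eth1.conf" -lf dhcpd_eth1.leases \
+        #       -pf "dhcpd_eth1.pid" -f -d > "dhcpd_eth1.log" 2>&1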
         self._runner.run_async(job_str)
 
         try:
-            self._wait_for_process(timeout=timeout)
-            self._wait_for_server(timeout=timeout)
+            self._wait_for_process(timeout=timeout_sec)
+            self._wait_for_server(timeout=timeout_sec)
         except:
             self._log.warn("Failed to start DHCP server.")
-            self._log.info("DHCP configuration:\n" +
-                           config.render_config_file() + "\n")
+            self._log.info("DHCP configuration:\n" + config.render_config_file() + "\n")
             self._log.info("DHCP logs:\n" + self.get_logs() + "\n")
             self.stop()
             raise
@@ -116,30 +122,13 @@
         """
         return self._shell.is_alive(self._identifier)
 
-    def get_logs(self):
+    def get_logs(self) -> str:
         """Pulls the log files from where dhcp server is running.
 
         Returns:
             A string of the dhcp server logs.
         """
-        try:
-            # Try reading the PID file. This will fail if the server failed to
-            # start.
-            pid = self._shell.read_file(self._pid_file)
-            # `dhcpd` logs to the syslog, where its messages are interspersed
-            # with all other programs that use the syslog. Log lines contain
-            # `dhcpd[<pid>]`, which we can search for to extract all the logs
-            # from this particular dhcpd instance.
-            # The logs are preferable to the stdio output, since they contain
-            # a superset of the information from stdio, including leases
-            # that the server provides.
-            return self._shell.run(
-                f"grep dhcpd.{pid} /var/log/messages").stdout
-        except Exception:
-            self._log.info(
-                "Failed to read logs from syslog (likely because the server " +
-                "failed to start). Falling back to stdio output.")
-            return self._shell.read_file(self._stdio_log_file)
+        return self._shell.read_file(self._stdio_log_file)
 
     def _wait_for_process(self, timeout=60):
         """Waits for the process to come up.
@@ -168,7 +157,8 @@
         start_time = time.time()
         while time.time() - start_time < timeout:
             success = self._shell.search_file(
-                'Wrote [0-9]* leases to leases file', self._stdio_log_file)
+                "Wrote [0-9]* leases to leases file", self._stdio_log_file
+            )
             if success:
                 return
 
@@ -194,14 +184,16 @@
         is_dead = not self.is_alive()
 
         no_interface = self._shell.search_file(
-            'Not configured to listen on any interfaces', self._stdio_log_file)
+            "Not configured to listen on any interfaces", self._stdio_log_file
+        )
         if no_interface:
             raise NoInterfaceError(
-                'Dhcp does not contain a subnet for any of the networks the'
-                ' current interfaces are on.')
+                "Dhcp does not contain a subnet for any of the networks the"
+                " current interfaces are on."
+            )
 
         if should_be_up and is_dead:
-            raise Error('Dhcp server failed to start.', self)
+            raise Error("Dhcp server failed to start.", self)
 
     def _write_configs(self, config):
         """Writes the configs to the dhcp server config file."""
diff --git a/src/antlion/controllers/ap_lib/extended_capabilities.py b/src/antlion/controllers/ap_lib/extended_capabilities.py
index c7c8ade..82029cc 100644
--- a/src/antlion/controllers/ap_lib/extended_capabilities.py
+++ b/src/antlion/controllers/ap_lib/extended_capabilities.py
@@ -31,6 +31,7 @@
     has the value of its offset; comments indicate capabilities that use
     multiple bits.
     """
+
     TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0
     GLK = 1
     EXTENDED_CHANNEL_SWITCHING = 2
@@ -161,11 +162,12 @@
             at this time.
         """
         if ext_cap in [
-                ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
-                ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU
+            ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
+            ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU,
         ]:
             raise NotImplementedError(
-                f'{ext_cap.name} not implemented yet by {__class__}')
+                f"{ext_cap.name} not implemented yet by {__class__}"
+            )
         byte_offset, bit_offset = _offsets(ext_cap)
         if len(self._ext_cap) > byte_offset:
             # Use bit_offset to derive a mask that will check the correct bit.
@@ -179,8 +181,7 @@
 
     @property
     def proxy_arp_service(self) -> bool:
-        return self._capability_advertised(
-            ExtendedCapability.PROXY_ARP_SERVICE)
+        return self._capability_advertised(ExtendedCapability.PROXY_ARP_SERVICE)
 
     @property
     def utc_tsf_offset(self) -> bool:
diff --git a/src/antlion/controllers/ap_lib/hostapd.py b/src/antlion/controllers/ap_lib/hostapd.py
index de93ea4..b3f780d 100644
--- a/src/antlion/controllers/ap_lib/hostapd.py
+++ b/src/antlion/controllers/ap_lib/hostapd.py
@@ -17,15 +17,20 @@
 import logging
 import re
 import time
-from typing import Set
 
-from antlion.controllers.ap_lib import hostapd_config
+from typing import Any, Dict, Optional, Set
+
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
 from antlion.controllers.utils_lib.commands import shell
 from antlion.libs.proc.job import Result
 
+PROGRAM_FILE = "/usr/sbin/hostapd"
+CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli"
+
 
 class Error(Exception):
     """An error caused by hostapd."""
@@ -38,15 +43,12 @@
         config: The hostapd configuration that is being used.
     """
 
-    PROGRAM_FILE = '/usr/sbin/hostapd'
-    CLI_PROGRAM_FILE = '/usr/bin/hostapd_cli'
-
-    def __init__(self, runner, interface, working_dir='/tmp'):
+    def __init__(self, runner: Any, interface: str, working_dir: str = "/tmp") -> None:
         """
         Args:
             runner: Object that has run_async and run methods for executing
                     shell commands (e.g. connection.SshConnection)
-            interface: string, The name of the interface to use (eg. wlan0).
+            interface: The name of the interface to use (eg. wlan0).
             working_dir: The directory to work out of.
         """
         self._runner = runner
@@ -54,12 +56,17 @@
         self._working_dir = working_dir
         self.config = None
         self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = 'hostapd-%s.log' % self._interface
-        self._ctrl_file = 'hostapd-%s.ctrl' % self._interface
-        self._config_file = 'hostapd-%s.conf' % self._interface
-        self._identifier = '%s.*%s' % (self.PROGRAM_FILE, self._config_file)
+        self._log_file = f"hostapd-{self._interface}.log"
+        self._ctrl_file = f"hostapd-{self._interface}.ctrl"
+        self._config_file = f"hostapd-{self._interface}.conf"
+        self._identifier = f"{PROGRAM_FILE}.*{self._config_file}"
 
-    def start(self, config, timeout=60, additional_parameters=None):
+    def start(
+        self,
+        config: Any,
+        timeout: int = 60,
+        additional_parameters: Optional[Dict[str, Any]] = None,
+    ) -> None:
         """Starts hostapd
 
         Starts the hostapd daemon and runs it in the background.
@@ -89,11 +96,9 @@
         self._shell.delete_file(self._config_file)
         self._write_configs(additional_parameters=additional_parameters)
 
-        hostapd_command = '%s -dd -t "%s"' % (self.PROGRAM_FILE,
-                                              self._config_file)
-        base_command = 'cd "%s"; %s' % (self._working_dir, hostapd_command)
-        job_str = 'rfkill unblock all; %s > "%s" 2>&1' %\
-                  (base_command, self._log_file)
+        hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"'
+        base_command = f'cd "{self._working_dir}"; {hostapd_command}'
+        job_str = f'rfkill unblock all; {base_command} > "{self._log_file}" 2>&1'
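+        # For example, with interface "wlan0" (illustrative) and the default
+        # working directory, job_str expands to:
+        #   rfkill unblock all; cd "/tmp"; /usr/sbin/hostapd -dd -t \
+        #       "hostapd-wlan0.conf" > "hostapd-wlan0.log" 2>&1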
         self._runner.run_async(job_str)
 
         try:
@@ -103,12 +108,12 @@
             self.stop()
             raise
 
-    def stop(self):
+    def stop(self) -> None:
         """Kills the daemon if it is running."""
         if self.is_alive():
             self._shell.kill(self._identifier)
 
-    def channel_switch(self, channel_num):
+    def channel_switch(self, channel_num: int) -> None:
         """Switches to the given channel.
 
         Returns:
@@ -118,26 +123,25 @@
         try:
             channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
         except KeyError:
-            raise ValueError('Invalid channel number {}'.format(channel_num))
+            raise ValueError(f"Invalid channel number {channel_num}")
         csa_beacon_count = 10
-        channel_switch_cmd = 'chan_switch {} {}'.format(
-            csa_beacon_count, channel_freq)
-        result = self._run_hostapd_cli_cmd(channel_switch_cmd)
+        channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}"
+        self._run_hostapd_cli_cmd(channel_switch_cmd)
 
-    def get_current_channel(self):
+    def get_current_channel(self) -> int:
         """Returns the current channel number.
 
         Raises: See _run_hostapd_cli_cmd
         """
-        status_cmd = 'status'
+        status_cmd = "status"
         result = self._run_hostapd_cli_cmd(status_cmd)
-        match = re.search(r'^channel=(\d+)$', result.stdout, re.MULTILINE)
+        match = re.search(r"^channel=(\d+)$", result.stdout, re.MULTILINE)
         if not match:
-            raise Error('Current channel could not be determined')
+            raise Error("Current channel could not be determined")
         try:
             channel = int(match.group(1))
         except ValueError:
-            raise Error('Internal error: current channel could not be parsed')
+            raise Error("Internal error: current channel could not be parsed")
         return channel
 
     def _list_sta(self) -> Result:
@@ -147,7 +151,7 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        list_sta_cmd = 'list_sta'
+        list_sta_cmd = "list_sta"
         return self._run_hostapd_cli_cmd(list_sta_cmd)
 
     def get_stas(self) -> Set[str]:
@@ -156,7 +160,7 @@
         stas = set()
         for line in list_sta_result.stdout.splitlines():
             # Each line must be a valid MAC address. Capture it.
-            m = re.match(r'((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})', line)
+            m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line)
             if m:
                 stas.add(m.group(1))
         return stas
@@ -168,11 +172,10 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        sta_cmd = 'sta {}'.format(sta_mac)
+        sta_cmd = "sta {}".format(sta_mac)
         return self._run_hostapd_cli_cmd(sta_cmd)
 
-    def get_sta_extended_capabilities(self,
-                                      sta_mac: str) -> ExtendedCapabilities:
+    def get_sta_extended_capabilities(self, sta_mac: str) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP.
 
         Args:
@@ -186,19 +189,18 @@
         # hostapd ext_capab field is a hex encoded string representation of the
         # 802.11 extended capabilities structure, each byte represented by two
         # chars (each byte having format %02x).
-        m = re.search(r'ext_capab=([0-9A-Faf]+)', sta_result.stdout,
-                      re.MULTILINE)
+        m = re.search(r"ext_capab=([0-9A-Faf]+)", sta_result.stdout, re.MULTILINE)
         if not m:
-            raise Error('Failed to get ext_capab from STA details')
+            raise Error("Failed to get ext_capab from STA details")
         raw_ext_capab = m.group(1)
         try:
             return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab))
         except ValueError:
-            raise Error(
-                f'ext_capab contains invalid hex string repr {raw_ext_capab}')
+            raise Error(f"ext_capab contains invalid hex string repr {raw_ext_capab}")
 
-    def _bss_tm_req(self, client_mac: str,
-                    request: BssTransitionManagementRequest) -> Result:
+    def _bss_tm_req(
+        self, client_mac: str, request: BssTransitionManagementRequest
+    ) -> Result:
         """Send a hostapd BSS Transition Management request command to a STA.
 
         Args:
@@ -208,22 +210,22 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        bss_tm_req_cmd = f'bss_tm_req {client_mac}'
+        bss_tm_req_cmd = f"bss_tm_req {client_mac}"
 
         if request.abridged:
-            bss_tm_req_cmd += ' abridged=1'
+            bss_tm_req_cmd += " abridged=1"
         if request.bss_termination_included and request.bss_termination_duration:
-            bss_tm_req_cmd += f' bss_term={request.bss_termination_duration.duration}'
+            bss_tm_req_cmd += f" bss_term={request.bss_termination_duration.duration}"
         if request.disassociation_imminent:
-            bss_tm_req_cmd += ' disassoc_imminent=1'
+            bss_tm_req_cmd += " disassoc_imminent=1"
         if request.disassociation_timer is not None:
-            bss_tm_req_cmd += f' disassoc_timer={request.disassociation_timer}'
+            bss_tm_req_cmd += f" disassoc_timer={request.disassociation_timer}"
         if request.preferred_candidate_list_included:
-            bss_tm_req_cmd += ' pref=1'
+            bss_tm_req_cmd += " pref=1"
         if request.session_information_url:
-            bss_tm_req_cmd += f' url={request.session_information_url}'
+            bss_tm_req_cmd += f" url={request.session_information_url}"
         if request.validity_interval:
-            bss_tm_req_cmd += f' valid_int={request.validity_interval}'
+            bss_tm_req_cmd += f" valid_int={request.validity_interval}"
 
         # neighbor= can appear multiple times, so it requires special handling.
         for neighbor in request.candidate_list:
@@ -232,13 +234,15 @@
             op_class = neighbor.operating_class
             chan_num = neighbor.channel_number
             phy_type = int(neighbor.phy_type)
-            bss_tm_req_cmd += f' neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}'
+            bss_tm_req_cmd += (
+                f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
+            )
 
         return self._run_hostapd_cli_cmd(bss_tm_req_cmd)
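+
+    # A fully assembled request command looks like (all values illustrative):
+    #   bss_tm_req 00:11:22:33:44:55 abridged=1 disassoc_imminent=1 \
+    #       neighbor=aa:bb:cc:dd:ee:ff,0,115,36,9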
 
     def send_bss_transition_management_req(
-            self, sta_mac: str,
-            request: BssTransitionManagementRequest) -> Result:
+        self, sta_mac: str, request: BssTransitionManagementRequest
+    ) -> Result:
         """Send a BSS Transition Management request to an associated STA.
 
         Args:
@@ -250,14 +254,14 @@
         """
         return self._bss_tm_req(sta_mac, request)
 
-    def is_alive(self):
+    def is_alive(self) -> bool:
         """
         Returns:
             True if the daemon is running.
         """
         return self._shell.is_alive(self._identifier)
 
-    def pull_logs(self):
+    def pull_logs(self) -> str:
         """Pulls the log files from where hostapd is running.
 
         Returns:
@@ -266,7 +270,7 @@
         # TODO: Auto pulling of logs when stop is called.
         return self._shell.read_file(self._log_file)
 
-    def _run_hostapd_cli_cmd(self, cmd):
+    def _run_hostapd_cli_cmd(self, cmd: str) -> Result:
         """Run the given hostapd_cli command.
 
         Runs the command, waits for the output (up to default timeout), and
@@ -283,12 +287,12 @@
             antlion.controllers.utils_lib.ssh.connection.CommandError: Ssh worked,
                 but the command had an error executing.
         """
-        hostapd_cli_job = 'cd {}; {} -p {} {}'.format(self._working_dir,
-                                                      self.CLI_PROGRAM_FILE,
-                                                      self._ctrl_file, cmd)
+        hostapd_cli_job = (
+            f"cd {self._working_dir}; " f"{CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}"
+        )
         return self._runner.run(hostapd_cli_job)
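+
+        # For example, cmd="status" on interface "wlan0" (illustrative) with
+        # the default working directory runs:
+        #   cd /tmp; /usr/bin/hostapd_cli -p hostapd-wlan0.ctrl status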
 
-    def _wait_for_process(self, timeout=60):
+    def _wait_for_process(self, timeout: int = 60) -> None:
         """Waits for the process to come up.
 
         Waits until the hostapd process is found running, or there is
@@ -302,7 +306,7 @@
             self._scan_for_errors(False)
             time.sleep(0.1)
 
-    def _wait_for_interface(self, timeout=60):
+    def _wait_for_interface(self, timeout: int = 60) -> None:
         """Waits for hostapd to report that the interface is up.
 
         Waits until hostapd says the interface has been brought up or an
@@ -313,15 +317,14 @@
         start_time = time.time()
         while time.time() - start_time < timeout:
             time.sleep(0.1)
-            success = self._shell.search_file('Setup of interface done',
-                                              self._log_file)
+            success = self._shell.search_file("Setup of interface done", self._log_file)
             if success:
                 return
             self._scan_for_errors(False)
 
         self._scan_for_errors(True)
 
-    def _scan_for_errors(self, should_be_up):
+    def _scan_for_errors(self, should_be_up: bool) -> None:
         """Scans the hostapd log for any errors.
 
         Args:
@@ -335,42 +338,46 @@
         # Store this so that all other errors have priority.
         is_dead = not self.is_alive()
 
-        bad_config = self._shell.search_file('Interface initialization failed',
-                                             self._log_file)
+        bad_config = self._shell.search_file(
+            "Interface initialization failed", self._log_file
+        )
         if bad_config:
-            raise Error('Interface failed to start', self)
+            raise Error("Interface failed to start", self)
 
         bad_config = self._shell.search_file(
-            "Interface %s wasn't started" % self._interface, self._log_file)
+            f"Interface {self._interface} wasn't started", self._log_file
+        )
         if bad_config:
-            raise Error('Interface failed to start', self)
+            raise Error("Interface failed to start", self)
 
         if should_be_up and is_dead:
-            raise Error('Hostapd failed to start', self)
+            raise Error("Hostapd failed to start", self)
 
-    def _write_configs(self, additional_parameters=None):
+    def _write_configs(
+        self, additional_parameters: Optional[Dict[str, Any]] = None
+    ) -> None:
         """Writes the configs to the hostapd config file."""
         self._shell.delete_file(self._config_file)
 
         interface_configs = collections.OrderedDict()
-        interface_configs['interface'] = self._interface
-        interface_configs['ctrl_interface'] = self._ctrl_file
-        pairs = ('%s=%s' % (k, v) for k, v in interface_configs.items())
+        interface_configs["interface"] = self._interface
+        interface_configs["ctrl_interface"] = self._ctrl_file
+        pairs = (f"{k}={v}" for k, v in interface_configs.items())
 
         packaged_configs = self.config.package_configs()
         if additional_parameters:
             packaged_configs.append(additional_parameters)
         for packaged_config in packaged_configs:
-            config_pairs = ('%s=%s' % (k, v)
-                            for k, v in packaged_config.items()
-                            if v is not None)
+            config_pairs = (
+                f"{k}={v}" for k, v in packaged_config.items() if v is not None
+            )
             pairs = itertools.chain(pairs, config_pairs)
 
-        hostapd_conf = '\n'.join(pairs)
+        hostapd_conf = "\n".join(pairs)
 
-        logging.info('Writing %s' % self._config_file)
-        logging.debug('******************Start*******************')
-        logging.debug('\n%s' % hostapd_conf)
-        logging.debug('*******************End********************')
+        logging.info(f"Writing {self._config_file}")
+        logging.debug("******************Start*******************")
+        logging.debug(f"\n{hostapd_conf}")
+        logging.debug("*******************End********************")
 
         self._shell.write_file(self._config_file, hostapd_conf)
diff --git a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py b/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
index 28062f5..3b694c0 100644
--- a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
+++ b/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
@@ -12,58 +12,58 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import FrozenSet
+from typing import Any, FrozenSet, List, Optional
 
 from antlion import utils
-
-import antlion.controllers.ap_lib.third_party_ap_profiles.actiontec as actiontec
-import antlion.controllers.ap_lib.third_party_ap_profiles.asus as asus
-import antlion.controllers.ap_lib.third_party_ap_profiles.belkin as belkin
-import antlion.controllers.ap_lib.third_party_ap_profiles.linksys as linksys
-import antlion.controllers.ap_lib.third_party_ap_profiles.netgear as netgear
-import antlion.controllers.ap_lib.third_party_ap_profiles.securifi as securifi
-import antlion.controllers.ap_lib.third_party_ap_profiles.tplink as tplink
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.third_party_ap_profiles import (
+    actiontec,
+    asus,
+    belkin,
+    linksys,
+    netgear,
+    securifi,
+    tplink,
+)
 
 
-def _get_or_default(var, default_value):
+def _get_or_default(var: Optional[Any], default_value: Any) -> Any:
     """Check variable and return non-null value.
 
-   Args:
-        var: Any variable.
-        default_value: Value to return if the var is None.
+    Args:
+         var: Any variable.
+         default_value: Value to return if the var is None.
 
-   Returns:
-        Variable value if not None, default value otherwise.
+    Returns:
+         Variable value if not None, default value otherwise.
     """
     return var if var is not None else default_value
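+
+    # e.g. _get_or_default(None, 80) -> 80, _get_or_default(40, 80) -> 40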
 
 
 def create_ap_preset(
-        profile_name='whirlwind',
-        iface_wlan_2g=None,
-        iface_wlan_5g=None,
-        channel=None,
-        mode=None,
-        frequency=None,
-        security=None,
-        pmf_support=None,
-        ssid=None,
-        hidden=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        beacon_interval=None,
-        short_preamble=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        bss_settings=[]):
+    profile_name: str = "whirlwind",
+    iface_wlan_2g: Optional[str] = None,
+    iface_wlan_5g: Optional[str] = None,
+    channel: Optional[int] = None,
+    mode: Optional[str] = None,
+    frequency: Optional[int] = None,
+    security: Optional[Security] = None,
+    pmf_support: Optional[int] = None,
+    ssid: Optional[str] = None,
+    hidden: Optional[bool] = None,
+    dtim_period: Optional[int] = None,
+    frag_threshold: Optional[int] = None,
+    rts_threshold: Optional[int] = None,
+    force_wmm: Optional[bool] = None,
+    beacon_interval: Optional[int] = None,
+    short_preamble: Optional[bool] = None,
+    n_capabilities: Optional[List[Any]] = None,
+    ac_capabilities: Optional[List[Any]] = None,
+    vht_bandwidth: Optional[int] = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    bss_settings: List[Any] = [],
+):
     """AP preset config generator.  This a wrapper for hostapd_config but
        but supplies the default settings for the preset that is selected.
 
@@ -77,8 +77,8 @@
         channel: int, channel number.
         dtim: int, DTIM value of the AP, default is 2.
         frequency: int, frequency of channel.
-        security: Security, the secuirty settings to use.
-        ssid: string, The name of the ssid to brodcast.
+        security: The security settings to use.
+        ssid: string, The name of the ssid to broadcast.
         pmf_support: int, whether pmf is disabled, enabled, or required
         vht_bandwidth: VHT bandwidth for 11ac operation.
         bss_settings: The settings for all bss.
@@ -99,19 +99,17 @@
     """
 
     # Verify interfaces
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
 
     if channel:
         frequency = hostapd_config.get_frequency_for_channel(channel)
     elif frequency:
         channel = hostapd_config.get_channel_for_frequency(frequency)
     else:
-        raise ValueError('Specify either frequency or channel.')
+        raise ValueError("Specify either frequency or channel.")
 
-    if profile_name == 'whirlwind':
+    if profile_name == "whirlwind":
         # profile indicates phy mode is 11bgn for 2.4Ghz or 11acn for 5Ghz
         hidden = _get_or_default(hidden, False)
         force_wmm = _get_or_default(force_wmm, True)
@@ -123,14 +121,17 @@
         if frequency < 5000:
             interface = iface_wlan_2g
             mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
-            n_capabilities = _get_or_default(n_capabilities, [
-                hostapd_constants.N_CAPABILITY_LDPC,
-                hostapd_constants.N_CAPABILITY_SGI20,
-                hostapd_constants.N_CAPABILITY_SGI40,
-                hostapd_constants.N_CAPABILITY_TX_STBC,
-                hostapd_constants.N_CAPABILITY_RX_STBC1,
-                hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-            ])
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -147,7 +148,8 @@
                 frag_threshold=frag_threshold,
                 rts_threshold=rts_threshold,
                 wnm_features=wnm_features,
-                bss_settings=bss_settings)
+                bss_settings=bss_settings,
+            )
         else:
             interface = iface_wlan_5g
             vht_bandwidth = _get_or_default(vht_bandwidth, 80)
@@ -164,32 +166,42 @@
             if not vht_bandwidth:
                 pass
             elif vht_bandwidth >= 40:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC, extended_channel,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1
-                ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
             else:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1,
-                    hostapd_constants.N_CAPABILITY_HT20
-                ])
-            ac_capabilities = _get_or_default(ac_capabilities, [
-                hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-                hostapd_constants.AC_CAPABILITY_RXLDPC,
-                hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-                hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-                hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-                hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-                hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
-            ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                ],
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -207,56 +219,61 @@
                 rts_threshold=rts_threshold,
                 n_capabilities=n_capabilities,
                 ac_capabilities=ac_capabilities,
-                bss_settings=bss_settings)
-    elif profile_name == 'whirlwind_11ab_legacy':
+                bss_settings=bss_settings,
+            )
+    elif profile_name == "whirlwind_11ab_legacy":
         if frequency < 5000:
             mode = hostapd_constants.MODE_11B
         else:
             mode = hostapd_constants.MODE_11A
 
-        config = create_ap_preset(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  ssid=ssid,
-                                  channel=channel,
-                                  mode=mode,
-                                  security=security,
-                                  pmf_support=pmf_support,
-                                  hidden=hidden,
-                                  force_wmm=force_wmm,
-                                  beacon_interval=beacon_interval,
-                                  short_preamble=short_preamble,
-                                  dtim_period=dtim_period,
-                                  rts_threshold=rts_threshold,
-                                  frag_threshold=frag_threshold,
-                                  n_capabilities=[],
-                                  ac_capabilities=[],
-                                  vht_bandwidth=None,
-                                  wnm_features=wnm_features)
-    elif profile_name == 'whirlwind_11ag_legacy':
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "whirlwind_11ag_legacy":
         if frequency < 5000:
             mode = hostapd_constants.MODE_11G
         else:
             mode = hostapd_constants.MODE_11A
 
-        config = create_ap_preset(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  ssid=ssid,
-                                  channel=channel,
-                                  mode=mode,
-                                  security=security,
-                                  pmf_support=pmf_support,
-                                  hidden=hidden,
-                                  force_wmm=force_wmm,
-                                  beacon_interval=beacon_interval,
-                                  short_preamble=short_preamble,
-                                  dtim_period=dtim_period,
-                                  rts_threshold=rts_threshold,
-                                  frag_threshold=frag_threshold,
-                                  n_capabilities=[],
-                                  ac_capabilities=[],
-                                  vht_bandwidth=None,
-                                  wnm_features=wnm_features)
-    elif profile_name == 'mistral':
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "mistral":
         hidden = _get_or_default(hidden, False)
         force_wmm = _get_or_default(force_wmm, True)
         beacon_interval = _get_or_default(beacon_interval, 100)
@@ -268,27 +285,31 @@
         # Google IE
         # Country Code IE ('us' lowercase)
         vendor_elements = {
-            'vendor_elements':
-            'dd0cf4f5e80505ff0000ffffffff'
-            '070a75732024041e95051e00'
+            "vendor_elements": "dd0cf4f5e80505ff0000ffffffff" "070a75732024041e95051e00"
         }
-        default_configs = {'bridge': 'br-lan', 'iapp_interface': 'br-lan'}
+        default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"}
 
         if frequency < 5000:
             interface = iface_wlan_2g
             mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
-            n_capabilities = _get_or_default(n_capabilities, [
-                hostapd_constants.N_CAPABILITY_LDPC,
-                hostapd_constants.N_CAPABILITY_SGI20,
-                hostapd_constants.N_CAPABILITY_SGI40,
-                hostapd_constants.N_CAPABILITY_TX_STBC,
-                hostapd_constants.N_CAPABILITY_RX_STBC1,
-                hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-            ])
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
 
             additional_params = utils.merge_dicts(
-                vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, default_configs)
+                vendor_elements,
+                hostapd_constants.ENABLE_RRM_BEACON_REPORT,
+                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
+                default_configs,
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -307,7 +328,8 @@
                 wnm_features=wnm_features,
                 bss_settings=bss_settings,
                 additional_parameters=additional_params,
-                set_ap_defaults_profile=profile_name)
+                set_ap_defaults_profile=profile_name,
+            )
         else:
             interface = iface_wlan_5g
             vht_bandwidth = _get_or_default(vht_bandwidth, 80)
@@ -321,41 +343,54 @@
                 mode = hostapd_constants.MODE_11N_MIXED
                 extended_channel = hostapd_constants.N_CAPABILITY_HT20
             if vht_bandwidth >= 40:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC, extended_channel,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1
-                ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
             else:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1,
-                    hostapd_constants.N_CAPABILITY_HT20
-                ])
-            ac_capabilities = _get_or_default(ac_capabilities, [
-                hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-                hostapd_constants.AC_CAPABILITY_RXLDPC,
-                hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-                hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-                hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-                hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-                hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
-                hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
-                hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
-                hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
-                hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4
-            ])
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
+                    hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
+                    hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4,
+                ],
+            )
 
             additional_params = utils.merge_dicts(
-                vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, default_configs)
+                vendor_elements,
+                hostapd_constants.ENABLE_RRM_BEACON_REPORT,
+                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
+                default_configs,
+            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -376,111 +411,133 @@
                 wnm_features=wnm_features,
                 bss_settings=bss_settings,
                 additional_parameters=additional_params,
-                set_ap_defaults_profile=profile_name)
-    elif profile_name == 'actiontec_pk5000':
-        config = actiontec.actiontec_pk5000(iface_wlan_2g=iface_wlan_2g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            security=security)
-    elif profile_name == 'actiontec_mi424wr':
-        config = actiontec.actiontec_mi424wr(iface_wlan_2g=iface_wlan_2g,
-                                             channel=channel,
-                                             ssid=ssid,
-                                             security=security)
-    elif profile_name == 'asus_rtac66u':
-        config = asus.asus_rtac66u(iface_wlan_2g=iface_wlan_2g,
-                                   iface_wlan_5g=iface_wlan_5g,
-                                   channel=channel,
-                                   ssid=ssid,
-                                   security=security)
-    elif profile_name == 'asus_rtac86u':
-        config = asus.asus_rtac86u(iface_wlan_2g=iface_wlan_2g,
-                                   iface_wlan_5g=iface_wlan_5g,
-                                   channel=channel,
-                                   ssid=ssid,
-                                   security=security)
-    elif profile_name == 'asus_rtac5300':
-        config = asus.asus_rtac5300(iface_wlan_2g=iface_wlan_2g,
-                                    iface_wlan_5g=iface_wlan_5g,
-                                    channel=channel,
-                                    ssid=ssid,
-                                    security=security)
-    elif profile_name == 'asus_rtn56u':
-        config = asus.asus_rtn56u(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  channel=channel,
-                                  ssid=ssid,
-                                  security=security)
-    elif profile_name == 'asus_rtn66u':
-        config = asus.asus_rtn66u(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  channel=channel,
-                                  ssid=ssid,
-                                  security=security)
-    elif profile_name == 'belkin_f9k1001v5':
-        config = belkin.belkin_f9k1001v5(iface_wlan_2g=iface_wlan_2g,
-                                         channel=channel,
-                                         ssid=ssid,
-                                         security=security)
-    elif profile_name == 'linksys_ea4500':
-        config = linksys.linksys_ea4500(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'linksys_ea9500':
-        config = linksys.linksys_ea9500(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'linksys_wrt1900acv2':
-        config = linksys.linksys_wrt1900acv2(iface_wlan_2g=iface_wlan_2g,
-                                             iface_wlan_5g=iface_wlan_5g,
-                                             channel=channel,
-                                             ssid=ssid,
-                                             security=security)
-    elif profile_name == 'netgear_r7000':
-        config = netgear.netgear_r7000(iface_wlan_2g=iface_wlan_2g,
-                                       iface_wlan_5g=iface_wlan_5g,
-                                       channel=channel,
-                                       ssid=ssid,
-                                       security=security)
-    elif profile_name == 'netgear_wndr3400':
-        config = netgear.netgear_wndr3400(iface_wlan_2g=iface_wlan_2g,
-                                          iface_wlan_5g=iface_wlan_5g,
-                                          channel=channel,
-                                          ssid=ssid,
-                                          security=security)
-    elif profile_name == 'securifi_almond':
-        config = securifi.securifi_almond(iface_wlan_2g=iface_wlan_2g,
-                                          channel=channel,
-                                          ssid=ssid,
-                                          security=security)
-    elif profile_name == 'tplink_archerc5':
-        config = tplink.tplink_archerc5(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'tplink_archerc7':
-        config = tplink.tplink_archerc7(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'tplink_c1200':
-        config = tplink.tplink_c1200(iface_wlan_2g=iface_wlan_2g,
-                                     iface_wlan_5g=iface_wlan_5g,
-                                     channel=channel,
-                                     ssid=ssid,
-                                     security=security)
-    elif profile_name == 'tplink_tlwr940n':
-        config = tplink.tplink_tlwr940n(iface_wlan_2g=iface_wlan_2g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
+                set_ap_defaults_profile=profile_name,
+            )
+    elif profile_name == "actiontec_pk5000":
+        config = actiontec.actiontec_pk5000(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "actiontec_mi424wr":
+        config = actiontec.actiontec_mi424wr(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "asus_rtac66u":
+        config = asus.asus_rtac66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac86u":
+        config = asus.asus_rtac86u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac5300":
+        config = asus.asus_rtac5300(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn56u":
+        config = asus.asus_rtn56u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn66u":
+        config = asus.asus_rtn66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "belkin_f9k1001v5":
+        config = belkin.belkin_f9k1001v5(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "linksys_ea4500":
+        config = linksys.linksys_ea4500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_ea9500":
+        config = linksys.linksys_ea9500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_wrt1900acv2":
+        config = linksys.linksys_wrt1900acv2(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_r7000":
+        config = netgear.netgear_r7000(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_wndr3400":
+        config = netgear.netgear_wndr3400(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "securifi_almond":
+        config = securifi.securifi_almond(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "tplink_archerc5":
+        config = tplink.tplink_archerc5(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_archerc7":
+        config = tplink.tplink_archerc7(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_c1200":
+        config = tplink.tplink_c1200(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_tlwr940n":
+        config = tplink.tplink_tlwr940n(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
     else:
-        raise ValueError('Invalid ap model specified (%s)' % profile_name)
+        raise ValueError(f"Invalid ap model specified ({profile_name})")
 
     return config
diff --git a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py b/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
index 3d298f5..56a5422 100644
--- a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
+++ b/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
@@ -37,12 +37,12 @@
     def generate_dict(self):
         """Returns: A dictionary of bss settings."""
         settings = collections.OrderedDict()
-        settings['bss'] = self.name
+        settings["bss"] = self.name
         if self.bssid:
-            settings['bssid'] = self.bssid
+            settings["bssid"] = self.bssid
         if self.ssid:
-            settings['ssid'] = self.ssid
-            settings['ignore_broadcast_ssid'] = 1 if self.hidden else 0
+            settings["ssid"] = self.ssid
+            settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0
 
         if self.security:
             security_settings = self.security.generate_dict()
diff --git a/src/antlion/controllers/ap_lib/hostapd_config.py b/src/antlion/controllers/ap_lib/hostapd_config.py
index abb50c9..a886e04 100644
--- a/src/antlion/controllers/ap_lib/hostapd_config.py
+++ b/src/antlion/controllers/ap_lib/hostapd_config.py
@@ -21,16 +21,24 @@
 
 def ht40_plus_allowed(channel):
     """Returns: True iff HT40+ is enabled for this configuration."""
-    channel_supported = (channel in hostapd_constants.HT40_ALLOW_MAP[
-        hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS])
-    return (channel_supported)
+    channel_supported = (
+        channel
+        in hostapd_constants.HT40_ALLOW_MAP[
+            hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+        ]
+    )
+    return channel_supported
 
 
 def ht40_minus_allowed(channel):
     """Returns: True iff HT40- is enabled for this configuration."""
-    channel_supported = (channel in hostapd_constants.HT40_ALLOW_MAP[
-        hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS])
-    return (channel_supported)
+    channel_supported = (
+        channel
+        in hostapd_constants.HT40_ALLOW_MAP[
+            hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+        ]
+    )
+    return channel_supported
 
 
 def get_frequency_for_channel(channel):
@@ -43,12 +51,11 @@
         int, frequency in MHz associated with the channel.
 
     """
-    for frequency, channel_iter in \
-        hostapd_constants.CHANNEL_MAP.items():
+    for frequency, channel_iter in hostapd_constants.CHANNEL_MAP.items():
         if channel == channel_iter:
             return frequency
     else:
-        raise ValueError('Unknown channel value: %r.' % channel)
+        raise ValueError("Unknown channel value: %r." % channel)
 
 
 def get_channel_for_frequency(frequency):
@@ -72,61 +79,68 @@
 
     def _get_11ac_center_channel_from_channel(self, channel):
         """Returns the center channel of the selected channel band based
-           on the channel and channel bandwidth provided.
+        on the channel and channel bandwidth provided.
         """
         channel = int(channel)
         center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[
-            self._vht_oper_chwidth]['delta']
+            self._vht_oper_chwidth
+        ]["delta"]
 
-        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[
-                self._vht_oper_chwidth]['channels']:
+        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[self._vht_oper_chwidth][
+            "channels"
+        ]:
             lower_channel_bound, upper_channel_bound = channel_map
             if lower_channel_bound <= channel <= upper_channel_bound:
                 return lower_channel_bound + center_channel_delta
-        raise ValueError('Invalid channel for {channel_width}.'.format(
-            channel_width=self._vht_oper_chwidth))
+        raise ValueError(
+            "Invalid channel for {channel_width}.".format(
+                channel_width=self._vht_oper_chwidth
+            )
+        )
 
     @property
     def _get_default_config(self):
         """Returns: dict of default options for hostapd."""
-        if self.set_ap_defaults_profile == 'mistral':
-            return collections.OrderedDict([
-                ('logger_syslog', '-1'),
-                ('logger_syslog_level', '0'),
-                # default RTS and frag threshold to ``off''
-                ('rts_threshold', None),
-                ('fragm_threshold', None),
-                ('driver', hostapd_constants.DRIVER_NAME)
-            ])
+        if self.set_ap_defaults_profile == "mistral":
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", None),
+                    ("fragm_threshold", None),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
         else:
-            return collections.OrderedDict([
-                ('logger_syslog', '-1'),
-                ('logger_syslog_level', '0'),
-                # default RTS and frag threshold to ``off''
-                ('rts_threshold', '2347'),
-                ('fragm_threshold', '2346'),
-                ('driver', hostapd_constants.DRIVER_NAME)
-            ])
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", "2347"),
+                    ("fragm_threshold", "2346"),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
 
     @property
     def _hostapd_ht_capabilities(self):
-        """Returns: string suitable for the ht_capab= line in a hostapd config.
-        """
+        """Returns: string suitable for the ht_capab= line in a hostapd config."""
         ret = []
         for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
             if cap in self._n_capabilities:
                 ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
-        return ''.join(ret)
+        return "".join(ret)
 
     @property
     def _hostapd_vht_capabilities(self):
-        """Returns: string suitable for the vht_capab= line in a hostapd config.
-        """
+        """Returns: string suitable for the vht_capab= line in a hostapd config."""
         ret = []
         for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
             if cap in self._ac_capabilities:
                 ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
-        return ''.join(ret)
+        return "".join(ret)
 
     @property
     def _require_ht(self):
@@ -152,19 +166,23 @@
             if self._frequency > 5000:
                 return hostapd_constants.MODE_11A
             return hostapd_constants.MODE_11G
-        raise ValueError('Invalid mode.')
+        raise ValueError("Invalid mode.")
 
     @property
     def is_11n(self):
         """Returns: True if we're trying to host an 802.11n network."""
-        return self._mode in (hostapd_constants.MODE_11N_MIXED,
-                              hostapd_constants.MODE_11N_PURE)
+        return self._mode in (
+            hostapd_constants.MODE_11N_MIXED,
+            hostapd_constants.MODE_11N_PURE,
+        )
 
     @property
     def is_11ac(self):
         """Returns: True if we're trying to host an 802.11ac network."""
-        return self._mode in (hostapd_constants.MODE_11AC_MIXED,
-                              hostapd_constants.MODE_11AC_PURE)
+        return self._mode in (
+            hostapd_constants.MODE_11AC_MIXED,
+            hostapd_constants.MODE_11AC_PURE,
+        )
 
     @property
     def channel(self):
@@ -203,7 +221,7 @@
 
         """
         if value not in hostapd_constants.CHANNEL_MAP:
-            raise ValueError('Tried to set an invalid frequency: %r.' % value)
+            raise ValueError("Tried to set an invalid frequency: %r." % value)
 
         self._frequency = value
 
@@ -273,12 +291,12 @@
             return None
 
         if ht40_plus_allowed(self.channel):
-            return 'HT40+'
+            return "HT40+"
 
         if ht40_minus_allowed(self.channel):
-            return 'HT40-'
+            return "HT40-"
 
-        return 'HT20'
+        return "HT20"
 
     @property
     def beacon_footer(self):
@@ -311,36 +329,37 @@
     def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]):
         self._wnm_features = value
 
-    def __init__(self,
-                 interface=None,
-                 mode=None,
-                 channel=None,
-                 frequency=None,
-                 n_capabilities=[],
-                 beacon_interval=None,
-                 dtim_period=None,
-                 frag_threshold=None,
-                 rts_threshold=None,
-                 short_preamble=None,
-                 ssid=None,
-                 hidden=False,
-                 security=None,
-                 bssid=None,
-                 force_wmm=None,
-                 pmf_support=None,
-                 obss_interval=None,
-                 vht_channel_width=None,
-                 vht_center_channel=None,
-                 ac_capabilities=[],
-                 beacon_footer='',
-                 spectrum_mgmt_required=None,
-                 scenario_name=None,
-                 min_streams=None,
-                 wnm_features: FrozenSet[
-                     hostapd_constants.WnmFeature] = frozenset(),
-                 bss_settings=[],
-                 additional_parameters={},
-                 set_ap_defaults_profile='whirlwind'):
+    def __init__(
+        self,
+        interface=None,
+        mode=None,
+        channel=None,
+        frequency=None,
+        n_capabilities=[],
+        beacon_interval=None,
+        dtim_period=None,
+        frag_threshold=None,
+        rts_threshold=None,
+        short_preamble=None,
+        ssid=None,
+        hidden=False,
+        security=None,
+        bssid=None,
+        force_wmm=None,
+        pmf_support=None,
+        obss_interval=None,
+        vht_channel_width=None,
+        vht_center_channel=None,
+        ac_capabilities=[],
+        beacon_footer="",
+        spectrum_mgmt_required=None,
+        scenario_name=None,
+        min_streams=None,
+        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+        bss_settings=[],
+        additional_parameters={},
+        set_ap_defaults_profile="whirlwind",
+    ):
         """Construct a HostapdConfig.
 
         You may specify channel or frequency, but not both.  Both options
@@ -390,16 +409,16 @@
         self.set_ap_defaults_profile = set_ap_defaults_profile
         self._interface = interface
         if channel is not None and frequency is not None:
-            raise ValueError('Specify either frequency or channel '
-                             'but not both.')
+            raise ValueError("Specify either frequency or channel " "but not both.")
 
         self._wmm_enabled = False
         unknown_caps = [
-            cap for cap in n_capabilities
+            cap
+            for cap in n_capabilities
             if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
         ]
         if unknown_caps:
-            raise ValueError('Unknown capabilities: %r' % unknown_caps)
+            raise ValueError("Unknown capabilities: %r" % unknown_caps)
 
         self._frequency = None
         if channel:
@@ -407,8 +426,8 @@
         elif frequency:
             self.frequency = frequency
         else:
-            raise ValueError('Specify either frequency or channel.')
-        '''
+            raise ValueError("Specify either frequency or channel.")
+        """
         if set_ap_defaults_model:
             ap_default_config = hostapd_ap_default_configs.APDefaultConfig(
                 profile_name=set_ap_defaults_model, frequency=self.frequency)
@@ -422,7 +441,7 @@
                 n_capabilities = ap_default_config.n_capabilities
             if ap_default_config.ac_capabilities:
                 ap_default_config = ap_default_config.ac_capabilities
-        '''
+        """
 
         self._n_capabilities = set(n_capabilities)
         if self._n_capabilities:
@@ -432,8 +451,10 @@
         self._mode = mode
 
         if not self.supports_frequency(self.frequency):
-            raise ValueError('Configured a mode %s that does not support '
-                             'frequency %d' % (self._mode, self.frequency))
+            raise ValueError(
+                "Configured a mode %s that does not support "
+                "frequency %d" % (self._mode, self.frequency)
+            )
 
         self._beacon_interval = beacon_interval
         self._dtim_period = dtim_period
@@ -451,12 +472,17 @@
                 self._wmm_enabled = 0
         # Default PMF Values
         if pmf_support is None:
-            if (self.security and self.security.security_mode_string ==
-                    hostapd_constants.WPA3_STRING):
+            if (
+                self.security
+                and self.security.security_mode_string == hostapd_constants.WPA3_STRING
+            ):
                 # Set PMF required for WP3
                 self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
-            elif (self.security and self.security.security_mode_string in
-                  hostapd_constants.WPA3_MODE_STRINGS):
+            elif (
+                self.security
+                and self.security.security_mode_string
+                in hostapd_constants.WPA3_MODE_STRINGS
+            ):
                 # Default PMF to enabled for WPA3 mixed modes (can be
                 # overwritten by explicitly provided value)
                 self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
@@ -465,37 +491,40 @@
                 # overwritten by explicitly provided value)
                 self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
         elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
-            raise ValueError('Invalid value for pmf_support: %r' % pmf_support)
-        elif (pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
-              and self.security and self.security.security_mode_string ==
-              hostapd_constants.WPA3_STRING):
-            raise ValueError('PMF support must be required with wpa3.')
+            raise ValueError("Invalid value for pmf_support: %r" % pmf_support)
+        elif (
+            pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
+            and self.security
+            and self.security.security_mode_string == hostapd_constants.WPA3_STRING
+        ):
+            raise ValueError("PMF support must be required with wpa3.")
         else:
             self._pmf_support = pmf_support
         self._obss_interval = obss_interval
         if self.is_11ac:
-            if str(vht_channel_width) == '40' or str(
-                    vht_channel_width) == '20':
+            if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
-            elif str(vht_channel_width) == '80':
+            elif str(vht_channel_width) == "80":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
-            elif str(vht_channel_width) == '160':
+            elif str(vht_channel_width) == "160":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
-            elif str(vht_channel_width) == '80+80':
+            elif str(vht_channel_width) == "80+80":
                 self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
             elif vht_channel_width is not None:
-                raise ValueError('Invalid channel width')
+                raise ValueError("Invalid channel width")
             else:
                 logging.warning(
-                    'No channel bandwidth specified.  Using 80MHz for 11ac.')
+                    "No channel bandwidth specified.  Using 80MHz for 11ac."
+                )
                 self._vht_oper_chwidth = 1
             if vht_center_channel is not None:
                 self._vht_oper_centr_freq_seg0_idx = vht_center_channel
             elif vht_channel_width == 20:
                 self._vht_oper_centr_freq_seg0_idx = channel
             else:
-                self._vht_oper_centr_freq_seg0_idx = self._get_11ac_center_channel_from_channel(
-                    self.channel)
+                self._vht_oper_centr_freq_seg0_idx = (
+                    self._get_11ac_center_channel_from_channel(self.channel)
+                )
             self._ac_capabilities = set(ac_capabilities)
         self._beacon_footer = beacon_footer
         self._spectrum_mgmt_required = spectrum_mgmt_required
@@ -507,21 +536,34 @@
         self._bss_lookup = collections.OrderedDict()
         for bss in bss_settings:
             if bss.name in self._bss_lookup:
-                raise ValueError('Cannot have multiple bss settings with the'
-                                 ' same name.')
+                raise ValueError(
+                    "Cannot have multiple bss settings with the" " same name."
+                )
             self._bss_lookup[bss.name] = bss
 
     def __repr__(self):
         return (
-            '%s(mode=%r, channel=%r, frequency=%r, '
-            'n_capabilities=%r, beacon_interval=%r, '
-            'dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, '
-            'wmm_enabled=%r, security_config=%r, '
-            'spectrum_mgmt_required=%r)' %
-            (self.__class__.__name__, self._mode, self.channel, self.frequency,
-             self._n_capabilities, self._beacon_interval, self._dtim_period,
-             self._frag_threshold, self._ssid, self._bssid, self._wmm_enabled,
-             self._security, self._spectrum_mgmt_required))
+            "%s(mode=%r, channel=%r, frequency=%r, "
+            "n_capabilities=%r, beacon_interval=%r, "
+            "dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, "
+            "wmm_enabled=%r, security_config=%r, "
+            "spectrum_mgmt_required=%r)"
+            % (
+                self.__class__.__name__,
+                self._mode,
+                self.channel,
+                self.frequency,
+                self._n_capabilities,
+                self._beacon_interval,
+                self._dtim_period,
+                self._frag_threshold,
+                self._ssid,
+                self._bssid,
+                self._wmm_enabled,
+                self._security,
+                self._spectrum_mgmt_required,
+            )
+        )
 
     def supports_channel(self, value):
         """Check whether channel is supported by the current hardware mode.
@@ -546,24 +588,38 @@
         if self._mode == hostapd_constants.MODE_11A and frequency < 5000:
             return False
 
-        if self._mode in (hostapd_constants.MODE_11B,
-                          hostapd_constants.MODE_11G) and frequency > 5000:
+        if (
+            self._mode in (hostapd_constants.MODE_11B, hostapd_constants.MODE_11G)
+            and frequency > 5000
+        ):
             return False
 
         if frequency not in hostapd_constants.CHANNEL_MAP:
             return False
 
         channel = hostapd_constants.CHANNEL_MAP[frequency]
-        supports_plus = (channel in hostapd_constants.HT40_ALLOW_MAP[
-            hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS])
-        supports_minus = (channel in hostapd_constants.HT40_ALLOW_MAP[
-            hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS])
-        if (hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
-                and not supports_plus):
+        supports_plus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+            ]
+        )
+        supports_minus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+            ]
+        )
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
+            and not supports_plus
+        ):
             return False
 
-        if (hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
-                and not supports_minus):
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
+            and not supports_minus
+        ):
             return False
 
         return True
@@ -575,7 +631,7 @@
             bss: The bss settings to add.
         """
         if bss.name in self._bss_lookup:
-            raise ValueError('A bss with the same name already exists.')
+            raise ValueError("A bss with the same name already exists.")
 
         self._bss_lookup[bss.name] = bss
 
@@ -594,52 +650,51 @@
         conf = self._get_default_config
 
         if self._interface:
-            conf['interface'] = self._interface
+            conf["interface"] = self._interface
         if self._bssid:
-            conf['bssid'] = self._bssid
+            conf["bssid"] = self._bssid
         if self._ssid:
-            conf['ssid'] = self._ssid
-            conf['ignore_broadcast_ssid'] = 1 if self._hidden else 0
-        conf['channel'] = self.channel
-        conf['hw_mode'] = self.hw_mode
+            conf["ssid"] = self._ssid
+            conf["ignore_broadcast_ssid"] = 1 if self._hidden else 0
+        conf["channel"] = self.channel
+        conf["hw_mode"] = self.hw_mode
         if self.is_11n or self.is_11ac:
-            conf['ieee80211n'] = 1
-            conf['ht_capab'] = self._hostapd_ht_capabilities
+            conf["ieee80211n"] = 1
+            conf["ht_capab"] = self._hostapd_ht_capabilities
         if self.is_11ac:
-            conf['ieee80211ac'] = 1
-            conf['vht_oper_chwidth'] = self._vht_oper_chwidth
-            conf['vht_oper_centr_freq_seg0_idx'] = \
-                    self._vht_oper_centr_freq_seg0_idx
-            conf['vht_capab'] = self._hostapd_vht_capabilities
+            conf["ieee80211ac"] = 1
+            conf["vht_oper_chwidth"] = self._vht_oper_chwidth
+            conf["vht_oper_centr_freq_seg0_idx"] = self._vht_oper_centr_freq_seg0_idx
+            conf["vht_capab"] = self._hostapd_vht_capabilities
         if self._wmm_enabled is not None:
-            conf['wmm_enabled'] = self._wmm_enabled
+            conf["wmm_enabled"] = self._wmm_enabled
         if self._require_ht:
-            conf['require_ht'] = 1
+            conf["require_ht"] = 1
         if self._require_vht:
-            conf['require_vht'] = 1
+            conf["require_vht"] = 1
         if self._beacon_interval:
-            conf['beacon_int'] = self._beacon_interval
+            conf["beacon_int"] = self._beacon_interval
         if self._dtim_period:
-            conf['dtim_period'] = self._dtim_period
+            conf["dtim_period"] = self._dtim_period
         if self._frag_threshold:
-            conf['fragm_threshold'] = self._frag_threshold
+            conf["fragm_threshold"] = self._frag_threshold
         if self._rts_threshold:
-            conf['rts_threshold'] = self._rts_threshold
+            conf["rts_threshold"] = self._rts_threshold
         if self._pmf_support:
-            conf['ieee80211w'] = self._pmf_support
+            conf["ieee80211w"] = self._pmf_support
         if self._obss_interval:
-            conf['obss_interval'] = self._obss_interval
+            conf["obss_interval"] = self._obss_interval
         if self._short_preamble:
-            conf['preamble'] = 1
+            conf["preamble"] = 1
         if self._spectrum_mgmt_required:
             # To set spectrum_mgmt_required, we must first set
             # local_pwr_constraint. And to set local_pwr_constraint,
             # we must first set ieee80211d. And to set ieee80211d, ...
             # Point being: order matters here.
-            conf['country_code'] = 'US'  # Required for local_pwr_constraint
-            conf['ieee80211d'] = 1  # Required for local_pwr_constraint
-            conf['local_pwr_constraint'] = 0  # No local constraint
-            conf['spectrum_mgmt_required'] = 1  # Requires local_pwr_constraint
+            conf["country_code"] = "US"  # Required for local_pwr_constraint
+            conf["ieee80211d"] = 1  # Required for local_pwr_constraint
+            conf["local_pwr_constraint"] = 0  # No local constraint
+            conf["spectrum_mgmt_required"] = 1  # Requires local_pwr_constraint
 
         if self._security:
             for k, v in self._security.generate_dict().items():
@@ -659,14 +714,15 @@
             elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE:
                 conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE)
             elif wnm_feature == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT:
-                conf.update(
-                    hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
+                conf.update(hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
             elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP:
                 conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP)
-            elif wnm_feature == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST:
+            elif (
+                wnm_feature
+                == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+            ):
                 conf.update(
-                    hostapd_constants.
-                    ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+                    hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
                 )
 
         if self._additional_parameters:
diff --git a/src/antlion/controllers/ap_lib/hostapd_constants.py b/src/antlion/controllers/ap_lib/hostapd_constants.py
index 3a95ffa..ae7ef85 100755
--- a/src/antlion/controllers/ap_lib/hostapd_constants.py
+++ b/src/antlion/controllers/ap_lib/hostapd_constants.py
@@ -18,8 +18,8 @@
 
 from enum import Enum, auto, unique
 
-BAND_2G = '2g'
-BAND_5G = '5g'
+BAND_2G = "2g"
+BAND_5G = "5g"
 CHANNEL_BANDWIDTH_20MHZ = 20
 CHANNEL_BANDWIDTH_40MHZ = 40
 CHANNEL_BANDWIDTH_80MHZ = 80
@@ -34,22 +34,22 @@
 MIN_WPA_PSK_LENGTH = 8
 MAX_WPA_PASSWORD_LENGTH = 63
 WPA_STRICT_REKEY = 1
-WPA_DEFAULT_CIPHER = 'TKIP'
-WPA2_DEFAULT_CIPER = 'CCMP'
+WPA_DEFAULT_CIPHER = "TKIP"
+WPA2_DEFAULT_CIPER = "CCMP"
 WPA_GROUP_KEY_ROTATION_TIME = 600
 WPA_STRICT_REKEY_DEFAULT = True
-WEP_STRING = 'wep'
-WPA_STRING = 'wpa'
-WPA2_STRING = 'wpa2'
-WPA_MIXED_STRING = 'wpa/wpa2'
-WPA3_STRING = 'wpa3'
-WPA2_WPA3_MIXED_STRING = 'wpa2/wpa3'
-WPA_WPA2_WPA3_MIXED_STRING = 'wpa/wpa2/wpa3'
-ENT_STRING = 'ent'
-ENT_KEY_MGMT = 'WPA-EAP'
-WPA_PSK_KEY_MGMT = 'WPA-PSK'
-SAE_KEY_MGMT = 'SAE'
-DUAL_WPA_PSK_SAE_KEY_MGMT = 'WPA-PSK SAE'
+WEP_STRING = "wep"
+WPA_STRING = "wpa"
+WPA2_STRING = "wpa2"
+WPA_MIXED_STRING = "wpa/wpa2"
+WPA3_STRING = "wpa3"
+WPA2_WPA3_MIXED_STRING = "wpa2/wpa3"
+WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3"
+ENT_STRING = "ent"
+ENT_KEY_MGMT = "WPA-EAP"
+WPA_PSK_KEY_MGMT = "WPA-PSK"
+SAE_KEY_MGMT = "SAE"
+DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE"
 SECURITY_STRING_TO_SECURITY_MODE_INT = {
     WPA_STRING: WPA1,
     WPA2_STRING: WPA2,
@@ -58,7 +58,7 @@
     WPA2_WPA3_MIXED_STRING: WPA3,
     WPA_WPA2_WPA3_MIXED_STRING: MIXED,
     WEP_STRING: WEP,
-    ENT_STRING: ENT
+    ENT_STRING: ENT,
 }
 SECURITY_STRING_TO_WPA_KEY_MGMT = {
     WPA_STRING: WPA_PSK_KEY_MGMT,
@@ -66,11 +66,9 @@
     WPA_MIXED_STRING: WPA_PSK_KEY_MGMT,
     WPA3_STRING: SAE_KEY_MGMT,
     WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
-    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT
+    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
 }
-WPA3_MODE_STRINGS = {
-    WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING
-}
+WPA3_MODE_STRINGS = {WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING}
 
 SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
     WEP_STRING: WEP_STRING,
@@ -79,16 +77,16 @@
     WPA_MIXED_STRING: WPA2_STRING,
     WPA3_STRING: WPA3_STRING,
     WPA2_WPA3_MIXED_STRING: WPA3_STRING,
-    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING
+    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING,
 }
 
 IEEE8021X = 1
-WLAN0_STRING = 'wlan0'
-WLAN1_STRING = 'wlan1'
-WLAN2_STRING = 'wlan2'
-WLAN3_STRING = 'wlan3'
-WLAN0_GALE = 'wlan-2400mhz'
-WLAN1_GALE = 'wlan-5000mhz'
+WLAN0_STRING = "wlan0"
+WLAN1_STRING = "wlan1"
+WLAN2_STRING = "wlan2"
+WLAN3_STRING = "wlan3"
+WLAN0_GALE = "wlan-2400mhz"
+WLAN1_GALE = "wlan-5000mhz"
 WEP_DEFAULT_KEY = 0
 WEP_HEX_LENGTH = [10, 26, 32, 58]
 WEP_STR_LENGTH = [5, 13, 16]
@@ -171,25 +169,48 @@
     5795: 159,
     5785: 157,
     5805: 161,
-    5825: 165
+    5825: 165,
 }
 FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()}
 
 US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
 US_CHANNELS_5G = [
-    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128,
-    132, 136, 140, 144, 149, 153, 157, 161, 165
+    36,
+    40,
+    44,
+    48,
+    52,
+    56,
+    60,
+    64,
+    100,
+    104,
+    108,
+    112,
+    116,
+    120,
+    124,
+    128,
+    132,
+    136,
+    140,
+    144,
+    149,
+    153,
+    157,
+    161,
+    165,
 ]
 
 LOWEST_5G_CHANNEL = 36
 
-MODE_11A = 'a'
-MODE_11B = 'b'
-MODE_11G = 'g'
-MODE_11N_MIXED = 'n-mixed'
-MODE_11N_PURE = 'n-only'
-MODE_11AC_MIXED = 'ac-mixed'
-MODE_11AC_PURE = 'ac-only'
+MODE_11A = "a"
+MODE_11B = "b"
+MODE_11G = "g"
+MODE_11N_MIXED = "n-mixed"
+MODE_11N_PURE = "n-only"
+MODE_11AC_MIXED = "ac-mixed"
+MODE_11AC_PURE = "ac-only"
 
 N_CAPABILITY_LDPC = object()
 N_CAPABILITY_HT20 = object()
@@ -210,29 +231,26 @@
 N_CAPABILITY_SMPS_STATIC = object()
 N_CAPABILITY_SMPS_DYNAMIC = object()
 N_CAPABILITIES_MAPPING = {
-    N_CAPABILITY_LDPC: '[LDPC]',
-    N_CAPABILITY_HT20: '[HT20]',
-    N_CAPABILITY_HT40_PLUS: '[HT40+]',
-    N_CAPABILITY_HT40_MINUS: '[HT40-]',
-    N_CAPABILITY_GREENFIELD: '[GF]',
-    N_CAPABILITY_SGI20: '[SHORT-GI-20]',
-    N_CAPABILITY_SGI40: '[SHORT-GI-40]',
-    N_CAPABILITY_TX_STBC: '[TX-STBC]',
-    N_CAPABILITY_RX_STBC1: '[RX-STBC1]',
-    N_CAPABILITY_RX_STBC12: '[RX-STBC12]',
-    N_CAPABILITY_RX_STBC123: '[RX-STBC123]',
-    N_CAPABILITY_DSSS_CCK_40: '[DSSS_CCK-40]',
-    N_CAPABILITY_LSIG_TXOP_PROT: '[LSIG-TXOP-PROT]',
-    N_CAPABILITY_40_INTOLERANT: '[40-INTOLERANT]',
-    N_CAPABILITY_MAX_AMSDU_7935: '[MAX-AMSDU-7935]',
-    N_CAPABILITY_DELAY_BLOCK_ACK: '[DELAYED-BA]',
-    N_CAPABILITY_SMPS_STATIC: '[SMPS-STATIC]',
-    N_CAPABILITY_SMPS_DYNAMIC: '[SMPS-DYNAMIC]'
+    N_CAPABILITY_LDPC: "[LDPC]",
+    N_CAPABILITY_HT20: "[HT20]",
+    N_CAPABILITY_HT40_PLUS: "[HT40+]",
+    N_CAPABILITY_HT40_MINUS: "[HT40-]",
+    N_CAPABILITY_GREENFIELD: "[GF]",
+    N_CAPABILITY_SGI20: "[SHORT-GI-20]",
+    N_CAPABILITY_SGI40: "[SHORT-GI-40]",
+    N_CAPABILITY_TX_STBC: "[TX-STBC]",
+    N_CAPABILITY_RX_STBC1: "[RX-STBC1]",
+    N_CAPABILITY_RX_STBC12: "[RX-STBC12]",
+    N_CAPABILITY_RX_STBC123: "[RX-STBC123]",
+    N_CAPABILITY_DSSS_CCK_40: "[DSSS_CCK-40]",
+    N_CAPABILITY_LSIG_TXOP_PROT: "[LSIG-TXOP-PROT]",
+    N_CAPABILITY_40_INTOLERANT: "[40-INTOLERANT]",
+    N_CAPABILITY_MAX_AMSDU_7935: "[MAX-AMSDU-7935]",
+    N_CAPABILITY_DELAY_BLOCK_ACK: "[DELAYED-BA]",
+    N_CAPABILITY_SMPS_STATIC: "[SMPS-STATIC]",
+    N_CAPABILITY_SMPS_DYNAMIC: "[SMPS-DYNAMIC]",
 }
-N_CAPABILITIES_MAPPING_INVERSE = {
-    v: k
-    for k, v in N_CAPABILITIES_MAPPING.items()
-}
+N_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in N_CAPABILITIES_MAPPING.items()}
 N_CAPABILITY_HT40_MINUS_CHANNELS = object()
 N_CAPABILITY_HT40_PLUS_CHANNELS = object()
 AC_CAPABILITY_VHT160 = object()
@@ -272,47 +290,44 @@
 AC_CAPABILITY_MAX_MPDU_7991 = object()
 AC_CAPABILITY_MAX_MPDU_11454 = object()
 AC_CAPABILITIES_MAPPING = {
-    AC_CAPABILITY_VHT160: '[VHT160]',
-    AC_CAPABILITY_VHT160_80PLUS80: '[VHT160-80PLUS80]',
-    AC_CAPABILITY_RXLDPC: '[RXLDPC]',
-    AC_CAPABILITY_SHORT_GI_80: '[SHORT-GI-80]',
-    AC_CAPABILITY_SHORT_GI_160: '[SHORT-GI-160]',
-    AC_CAPABILITY_TX_STBC_2BY1: '[TX-STBC-2BY1]',
-    AC_CAPABILITY_RX_STBC_1: '[RX-STBC-1]',
-    AC_CAPABILITY_RX_STBC_12: '[RX-STBC-12]',
-    AC_CAPABILITY_RX_STBC_123: '[RX-STBC-123]',
-    AC_CAPABILITY_RX_STBC_1234: '[RX-STBC-1234]',
-    AC_CAPABILITY_SU_BEAMFORMER: '[SU-BEAMFORMER]',
-    AC_CAPABILITY_SU_BEAMFORMEE: '[SU-BEAMFORMEE]',
-    AC_CAPABILITY_BF_ANTENNA_2: '[BF-ANTENNA-2]',
-    AC_CAPABILITY_BF_ANTENNA_3: '[BF-ANTENNA-3]',
-    AC_CAPABILITY_BF_ANTENNA_4: '[BF-ANTENNA-4]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_2: '[SOUNDING-DIMENSION-2]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_3: '[SOUNDING-DIMENSION-3]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_4: '[SOUNDING-DIMENSION-4]',
-    AC_CAPABILITY_MU_BEAMFORMER: '[MU-BEAMFORMER]',
-    AC_CAPABILITY_MU_BEAMFORMEE: '[MU-BEAMFORMEE]',
-    AC_CAPABILITY_VHT_TXOP_PS: '[VHT-TXOP-PS]',
-    AC_CAPABILITY_HTC_VHT: '[HTC-VHT]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: '[MAX-A-MPDU-LEN-EXP0]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: '[MAX-A-MPDU-LEN-EXP1]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: '[MAX-A-MPDU-LEN-EXP2]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: '[MAX-A-MPDU-LEN-EXP3]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: '[MAX-A-MPDU-LEN-EXP4]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: '[MAX-A-MPDU-LEN-EXP5]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: '[MAX-A-MPDU-LEN-EXP6]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: '[MAX-A-MPDU-LEN-EXP7]',
-    AC_CAPABILITY_VHT_LINK_ADAPT2: '[VHT-LINK-ADAPT2]',
-    AC_CAPABILITY_VHT_LINK_ADAPT3: '[VHT-LINK-ADAPT3]',
-    AC_CAPABILITY_RX_ANTENNA_PATTERN: '[RX-ANTENNA-PATTERN]',
-    AC_CAPABILITY_TX_ANTENNA_PATTERN: '[TX-ANTENNA-PATTERN]',
-    AC_CAPABILITY_MAX_MPDU_11454: '[MAX-MPDU-11454]',
-    AC_CAPABILITY_MAX_MPDU_7991: '[MAX-MPDU-7991]'
+    AC_CAPABILITY_VHT160: "[VHT160]",
+    AC_CAPABILITY_VHT160_80PLUS80: "[VHT160-80PLUS80]",
+    AC_CAPABILITY_RXLDPC: "[RXLDPC]",
+    AC_CAPABILITY_SHORT_GI_80: "[SHORT-GI-80]",
+    AC_CAPABILITY_SHORT_GI_160: "[SHORT-GI-160]",
+    AC_CAPABILITY_TX_STBC_2BY1: "[TX-STBC-2BY1]",
+    AC_CAPABILITY_RX_STBC_1: "[RX-STBC-1]",
+    AC_CAPABILITY_RX_STBC_12: "[RX-STBC-12]",
+    AC_CAPABILITY_RX_STBC_123: "[RX-STBC-123]",
+    AC_CAPABILITY_RX_STBC_1234: "[RX-STBC-1234]",
+    AC_CAPABILITY_SU_BEAMFORMER: "[SU-BEAMFORMER]",
+    AC_CAPABILITY_SU_BEAMFORMEE: "[SU-BEAMFORMEE]",
+    AC_CAPABILITY_BF_ANTENNA_2: "[BF-ANTENNA-2]",
+    AC_CAPABILITY_BF_ANTENNA_3: "[BF-ANTENNA-3]",
+    AC_CAPABILITY_BF_ANTENNA_4: "[BF-ANTENNA-4]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_2: "[SOUNDING-DIMENSION-2]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_3: "[SOUNDING-DIMENSION-3]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_4: "[SOUNDING-DIMENSION-4]",
+    AC_CAPABILITY_MU_BEAMFORMER: "[MU-BEAMFORMER]",
+    AC_CAPABILITY_MU_BEAMFORMEE: "[MU-BEAMFORMEE]",
+    AC_CAPABILITY_VHT_TXOP_PS: "[VHT-TXOP-PS]",
+    AC_CAPABILITY_HTC_VHT: "[HTC-VHT]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: "[MAX-A-MPDU-LEN-EXP0]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: "[MAX-A-MPDU-LEN-EXP1]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: "[MAX-A-MPDU-LEN-EXP2]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: "[MAX-A-MPDU-LEN-EXP3]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: "[MAX-A-MPDU-LEN-EXP4]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: "[MAX-A-MPDU-LEN-EXP5]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: "[MAX-A-MPDU-LEN-EXP6]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: "[MAX-A-MPDU-LEN-EXP7]",
+    AC_CAPABILITY_VHT_LINK_ADAPT2: "[VHT-LINK-ADAPT2]",
+    AC_CAPABILITY_VHT_LINK_ADAPT3: "[VHT-LINK-ADAPT3]",
+    AC_CAPABILITY_RX_ANTENNA_PATTERN: "[RX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_TX_ANTENNA_PATTERN: "[TX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_MAX_MPDU_11454: "[MAX-MPDU-11454]",
+    AC_CAPABILITY_MAX_MPDU_7991: "[MAX-MPDU-7991]",
 }
-AC_CAPABILITIES_MAPPING_INVERSE = {
-    v: k
-    for k, v in AC_CAPABILITIES_MAPPING.items()
-}
+AC_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in AC_CAPABILITIES_MAPPING.items()}
 VHT_CHANNEL_WIDTH_40 = 0
 VHT_CHANNEL_WIDTH_80 = 1
 VHT_CHANNEL_WIDTH_160 = 2
@@ -321,7 +336,7 @@
 VHT_CHANNEL = {
     40: VHT_CHANNEL_WIDTH_40,
     80: VHT_CHANNEL_WIDTH_80,
-    160: VHT_CHANNEL_WIDTH_160
+    160: VHT_CHANNEL_WIDTH_160,
 }
 
 # This is a loose merging of the rules for US and EU regulatory
@@ -330,1026 +345,499 @@
 # tolerate HT40+ on channel 7 (not allowed in the US).  We take the loose
 # definition so that we don't prohibit testing in either domain.
 HT40_ALLOW_MAP = {
-    N_CAPABILITY_HT40_MINUS_CHANNELS:
-    tuple(
-        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8),
-                        [153, 161])),
-    N_CAPABILITY_HT40_PLUS_CHANNELS:
-    tuple(
-        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8),
-                        [149, 157]))
+    N_CAPABILITY_HT40_MINUS_CHANNELS: tuple(
+        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8), [153, 161])
+    ),
+    N_CAPABILITY_HT40_PLUS_CHANNELS: tuple(
+        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8), [149, 157])
+    ),
 }
 
 PMF_SUPPORT_DISABLED = 0
 PMF_SUPPORT_ENABLED = 1
 PMF_SUPPORT_REQUIRED = 2
-PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED,
-                      PMF_SUPPORT_REQUIRED)
+PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED, PMF_SUPPORT_REQUIRED)
 
-DRIVER_NAME = 'nl80211'
+DRIVER_NAME = "nl80211"
 
 CENTER_CHANNEL_MAP = {
     VHT_CHANNEL_WIDTH_40: {
-        'delta':
-        2,
-        'channels': ((36, 40), (44, 48), (52, 56), (60, 64), (100, 104),
-                     (108, 112), (116, 120), (124, 128), (132, 136),
-                     (140, 144), (149, 153), (157, 161))
+        "delta": 2,
+        "channels": (
+            (36, 40),
+            (44, 48),
+            (52, 56),
+            (60, 64),
+            (100, 104),
+            (108, 112),
+            (116, 120),
+            (124, 128),
+            (132, 136),
+            (140, 144),
+            (149, 153),
+            (157, 161),
+        ),
     },
     VHT_CHANNEL_WIDTH_80: {
-        'delta':
-        6,
-        'channels':
-        ((36, 48), (52, 64), (100, 112), (116, 128), (132, 144), (149, 161))
+        "delta": 6,
+        "channels": (
+            (36, 48),
+            (52, 64),
+            (100, 112),
+            (116, 128),
+            (132, 144),
+            (149, 161),
+        ),
     },
-    VHT_CHANNEL_WIDTH_160: {
-        'delta': 14,
-        'channels': ((36, 64), (100, 128))
-    }
+    VHT_CHANNEL_WIDTH_160: {"delta": 14, "channels": ((36, 64), (100, 128))},
 }
 
-OFDM_DATA_RATES = {'supported_rates': '60 90 120 180 240 360 480 540'}
+OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"}
 
-CCK_DATA_RATES = {'supported_rates': '10 20 55 110'}
+CCK_DATA_RATES = {"supported_rates": "10 20 55 110"}
 
 CCK_AND_OFDM_DATA_RATES = {
-    'supported_rates': '10 20 55 110 60 90 120 180 240 360 480 540'
+    "supported_rates": "10 20 55 110 60 90 120 180 240 360 480 540"
 }
 
-OFDM_ONLY_BASIC_RATES = {'basic_rates': '60 120 240'}
+OFDM_ONLY_BASIC_RATES = {"basic_rates": "60 120 240"}
 
-CCK_AND_OFDM_BASIC_RATES = {'basic_rates': '10 20 55 110'}
+CCK_AND_OFDM_BASIC_RATES = {"basic_rates": "10 20 55 110"}
 
 WEP_AUTH = {
-    'open': {
-        'auth_algs': 1
-    },
-    'shared': {
-        'auth_algs': 2
-    },
-    'open_and_shared': {
-        'auth_algs': 3
-    }
+    "open": {"auth_algs": 1},
+    "shared": {"auth_algs": 2},
+    "open_and_shared": {"auth_algs": 3},
 }
 
 WMM_11B_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 5,
-    'wmm_ac_bk_cwmax': 10,
-    'wmm_ac_bk_aifs': 7,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 3,
-    'wmm_ac_be_cwmin': 5,
-    'wmm_ac_be_cwmax': 7,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 4,
-    'wmm_ac_vi_cwmax': 5,
-    'wmm_ac_vi_txop_limit': 188,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 3,
-    'wmm_ac_vo_cwmax': 4,
-    'wmm_ac_vo_txop_limit': 102
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 5,
+    "wmm_ac_be_cwmax": 7,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 4,
+    "wmm_ac_vi_cwmax": 5,
+    "wmm_ac_vi_txop_limit": 188,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 3,
+    "wmm_ac_vo_cwmax": 4,
+    "wmm_ac_vo_txop_limit": 102,
 }
 
 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 4,
-    'wmm_ac_bk_cwmax': 10,
-    'wmm_ac_bk_aifs': 7,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 3,
-    'wmm_ac_be_cwmin': 4,
-    'wmm_ac_be_cwmax': 10,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 3,
-    'wmm_ac_vi_cwmax': 4,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 2,
-    'wmm_ac_vo_cwmax': 3,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 4,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 4,
+    "wmm_ac_be_cwmax": 10,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 3,
+    "wmm_ac_vi_cwmax": 4,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 2,
+    "wmm_ac_vo_cwmax": 3,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_NON_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 5,
-    'wmm_ac_bk_cwmax': 9,
-    'wmm_ac_bk_aifs': 3,
-    'wmm_ac_bk_txop_limit': 94,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 2,
-    'wmm_ac_be_cwmax': 8,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 1,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 10,
-    'wmm_ac_vi_txop_limit': 47,
-    'wmm_ac_vo_aifs': 1,
-    'wmm_ac_vo_cwmin': 6,
-    'wmm_ac_vo_cwmax': 10,
-    'wmm_ac_vo_txop_limit': 94
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 9,
+    "wmm_ac_bk_aifs": 3,
+    "wmm_ac_bk_txop_limit": 94,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 2,
+    "wmm_ac_be_cwmax": 8,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 1,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 10,
+    "wmm_ac_vi_txop_limit": 47,
+    "wmm_ac_vo_aifs": 1,
+    "wmm_ac_vo_cwmin": 6,
+    "wmm_ac_vo_cwmax": 10,
+    "wmm_ac_vo_txop_limit": 94,
 }
 
 WMM_DEGRADED_VO_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_DEGRADED_VI_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_IMPROVE_BE_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 10,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 10,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
 WMM_IMPROVE_BK_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 10,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 10,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
 }
 
-WMM_ACM_BK = {'wmm_ac_bk_acm': 1}
-WMM_ACM_BE = {'wmm_ac_be_acm': 1}
-WMM_ACM_VI = {'wmm_ac_vi_acm': 1}
-WMM_ACM_VO = {'wmm_ac_vo_acm': 1}
+WMM_ACM_BK = {"wmm_ac_bk_acm": 1}
+WMM_ACM_BE = {"wmm_ac_be_acm": 1}
+WMM_ACM_VI = {"wmm_ac_vi_acm": 1}
+WMM_ACM_VO = {"wmm_ac_vo_acm": 1}
 
-UAPSD_ENABLED = {'uapsd_advertisement_enabled': 1}
+UAPSD_ENABLED = {"uapsd_advertisement_enabled": 1}
 
-UTF_8_SSID = {'utf8_ssid': 1}
+UTF_8_SSID = {"utf8_ssid": 1}
 
-ENABLE_RRM_BEACON_REPORT = {'rrm_beacon_report': 1}
-ENABLE_RRM_NEIGHBOR_REPORT = {'rrm_neighbor_report': 1}
+ENABLE_RRM_BEACON_REPORT = {"rrm_beacon_report": 1}
+ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1}
 
 # Wireless Network Management (AKA 802.11v) features.
-ENABLE_WNM_TIME_ADVERTISEMENT = {'time_advertisement': 2, 'time_zone': 'EST5'}
-ENABLE_WNM_SLEEP_MODE = {'wnm_sleep_mode': 1}
-ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {'bss_transition': 1}
-ENABLE_WNM_PROXY_ARP = {'proxy_arp': 1}
-ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {
-    'na_mcast_to_ucast': 1
-}
+ENABLE_WNM_TIME_ADVERTISEMENT = {"time_advertisement": 2, "time_zone": "EST5"}
+ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1}
+ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1}
+ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1}
+ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {"na_mcast_to_ucast": 1}
 
 VENDOR_IE = {
-    'correct_length_beacon': {
-        'vendor_elements': 'dd0411223301'
+    "correct_length_beacon": {"vendor_elements": "dd0411223301"},
+    "too_short_length_beacon": {"vendor_elements": "dd0311223301"},
+    "too_long_length_beacon": {"vendor_elements": "dd0511223301"},
+    "zero_length_beacon_with_data": {"vendor_elements": "dd0011223301"},
+    "zero_length_beacon_without_data": {"vendor_elements": "dd00"},
+    "simliar_to_wpa": {"vendor_elements": "dd040050f203"},
+    "correct_length_association_response": {"assocresp_elements": "dd0411223301"},
+    "too_short_length_association_response": {"assocresp_elements": "dd0311223301"},
+    "too_long_length_association_response": {"assocresp_elements": "dd0511223301"},
+    "zero_length_association_response_with_data": {
+        "assocresp_elements": "dd0011223301"
     },
-    'too_short_length_beacon': {
-        'vendor_elements': 'dd0311223301'
-    },
-    'too_long_length_beacon': {
-        'vendor_elements': 'dd0511223301'
-    },
-    'zero_length_beacon_with_data': {
-        'vendor_elements': 'dd0011223301'
-    },
-    'zero_length_beacon_without_data': {
-        'vendor_elements': 'dd00'
-    },
-    'simliar_to_wpa': {
-        'vendor_elements': 'dd040050f203'
-    },
-    'correct_length_association_response': {
-        'assocresp_elements': 'dd0411223301'
-    },
-    'too_short_length_association_response': {
-        'assocresp_elements': 'dd0311223301'
-    },
-    'too_long_length_association_response': {
-        'assocresp_elements': 'dd0511223301'
-    },
-    'zero_length_association_response_with_data': {
-        'assocresp_elements': 'dd0011223301'
-    },
-    'zero_length_association_response_without_data': {
-        'assocresp_elements': 'dd00'
-    }
+    "zero_length_association_response_without_data": {"assocresp_elements": "dd00"},
 }
 
-ENABLE_IEEE80211D = {'ieee80211d': 1}
+ENABLE_IEEE80211D = {"ieee80211d": 1}
 
 COUNTRY_STRING = {
-    'ALL': {
-        'country3': '0x20'
-    },
-    'OUTDOOR': {
-        'country3': '0x4f'
-    },
-    'INDOOR': {
-        'country3': '0x49'
-    },
-    'NONCOUNTRY': {
-        'country3': '0x58'
-    },
-    'GLOBAL': {
-        'country3': '0x04'
-    }
+    "ALL": {"country3": "0x20"},
+    "OUTDOOR": {"country3": "0x4f"},
+    "INDOOR": {"country3": "0x49"},
+    "NONCOUNTRY": {"country3": "0x58"},
+    "GLOBAL": {"country3": "0x04"},
 }
 
 COUNTRY_CODE = {
-    'AFGHANISTAN': {
-        'country_code': 'AF'
-    },
-    'ALAND_ISLANDS': {
-        'country_code': 'AX'
-    },
-    'ALBANIA': {
-        'country_code': 'AL'
-    },
-    'ALGERIA': {
-        'country_code': 'DZ'
-    },
-    'AMERICAN_SAMOA': {
-        'country_code': 'AS'
-    },
-    'ANDORRA': {
-        'country_code': 'AD'
-    },
-    'ANGOLA': {
-        'country_code': 'AO'
-    },
-    'ANGUILLA': {
-        'country_code': 'AI'
-    },
-    'ANTARCTICA': {
-        'country_code': 'AQ'
-    },
-    'ANTIGUA_AND_BARBUDA': {
-        'country_code': 'AG'
-    },
-    'ARGENTINA': {
-        'country_code': 'AR'
-    },
-    'ARMENIA': {
-        'country_code': 'AM'
-    },
-    'ARUBA': {
-        'country_code': 'AW'
-    },
-    'AUSTRALIA': {
-        'country_code': 'AU'
-    },
-    'AUSTRIA': {
-        'country_code': 'AT'
-    },
-    'AZERBAIJAN': {
-        'country_code': 'AZ'
-    },
-    'BAHAMAS': {
-        'country_code': 'BS'
-    },
-    'BAHRAIN': {
-        'country_code': 'BH'
-    },
-    'BANGLADESH': {
-        'country_code': 'BD'
-    },
-    'BARBADOS': {
-        'country_code': 'BB'
-    },
-    'BELARUS': {
-        'country_code': 'BY'
-    },
-    'BELGIUM': {
-        'country_code': 'BE'
-    },
-    'BELIZE': {
-        'country_code': 'BZ'
-    },
-    'BENIN': {
-        'country_code': 'BJ'
-    },
-    'BERMUDA': {
-        'country_code': 'BM'
-    },
-    'BHUTAN': {
-        'country_code': 'BT'
-    },
-    'BOLIVIA': {
-        'country_code': 'BO'
-    },
-    'BONAIRE': {
-        'country_code': 'BQ'
-    },
-    'BOSNIA_AND_HERZEGOVINA': {
-        'country_code': 'BA'
-    },
-    'BOTSWANA': {
-        'country_code': 'BW'
-    },
-    'BOUVET_ISLAND': {
-        'country_code': 'BV'
-    },
-    'BRAZIL': {
-        'country_code': 'BR'
-    },
-    'BRITISH_INDIAN_OCEAN_TERRITORY': {
-        'country_code': 'IO'
-    },
-    'BRUNEI_DARUSSALAM': {
-        'country_code': 'BN'
-    },
-    'BULGARIA': {
-        'country_code': 'BG'
-    },
-    'BURKINA_FASO': {
-        'country_code': 'BF'
-    },
-    'BURUNDI': {
-        'country_code': 'BI'
-    },
-    'CAMBODIA': {
-        'country_code': 'KH'
-    },
-    'CAMEROON': {
-        'country_code': 'CM'
-    },
-    'CANADA': {
-        'country_code': 'CA'
-    },
-    'CAPE_VERDE': {
-        'country_code': 'CV'
-    },
-    'CAYMAN_ISLANDS': {
-        'country_code': 'KY'
-    },
-    'CENTRAL_AFRICAN_REPUBLIC': {
-        'country_code': 'CF'
-    },
-    'CHAD': {
-        'country_code': 'TD'
-    },
-    'CHILE': {
-        'country_code': 'CL'
-    },
-    'CHINA': {
-        'country_code': 'CN'
-    },
-    'CHRISTMAS_ISLAND': {
-        'country_code': 'CX'
-    },
-    'COCOS_ISLANDS': {
-        'country_code': 'CC'
-    },
-    'COLOMBIA': {
-        'country_code': 'CO'
-    },
-    'COMOROS': {
-        'country_code': 'KM'
-    },
-    'CONGO': {
-        'country_code': 'CG'
-    },
-    'DEMOCRATIC_REPUBLIC_CONGO': {
-        'country_code': 'CD'
-    },
-    'COOK_ISLANDS': {
-        'country_code': 'CK'
-    },
-    'COSTA_RICA': {
-        'country_code': 'CR'
-    },
-    'COTE_D_IVOIRE': {
-        'country_code': 'CI'
-    },
-    'CROATIA': {
-        'country_code': 'HR'
-    },
-    'CUBA': {
-        'country_code': 'CU'
-    },
-    'CURACAO': {
-        'country_code': 'CW'
-    },
-    'CYPRUS': {
-        'country_code': 'CY'
-    },
-    'CZECH_REPUBLIC': {
-        'country_code': 'CZ'
-    },
-    'DENMARK': {
-        'country_code': 'DK'
-    },
-    'DJIBOUTI': {
-        'country_code': 'DJ'
-    },
-    'DOMINICA': {
-        'country_code': 'DM'
-    },
-    'DOMINICAN_REPUBLIC': {
-        'country_code': 'DO'
-    },
-    'ECUADOR': {
-        'country_code': 'EC'
-    },
-    'EGYPT': {
-        'country_code': 'EG'
-    },
-    'EL_SALVADOR': {
-        'country_code': 'SV'
-    },
-    'EQUATORIAL_GUINEA': {
-        'country_code': 'GQ'
-    },
-    'ERITREA': {
-        'country_code': 'ER'
-    },
-    'ESTONIA': {
-        'country_code': 'EE'
-    },
-    'ETHIOPIA': {
-        'country_code': 'ET'
-    },
-    'FALKLAND_ISLANDS_(MALVINAS)': {
-        'country_code': 'FK'
-    },
-    'FAROE_ISLANDS': {
-        'country_code': 'FO'
-    },
-    'FIJI': {
-        'country_code': 'FJ'
-    },
-    'FINLAND': {
-        'country_code': 'FI'
-    },
-    'FRANCE': {
-        'country_code': 'FR'
-    },
-    'FRENCH_GUIANA': {
-        'country_code': 'GF'
-    },
-    'FRENCH_POLYNESIA': {
-        'country_code': 'PF'
-    },
-    'FRENCH_SOUTHERN_TERRITORIES': {
-        'country_code': 'TF'
-    },
-    'GABON': {
-        'country_code': 'GA'
-    },
-    'GAMBIA': {
-        'country_code': 'GM'
-    },
-    'GEORGIA': {
-        'country_code': 'GE'
-    },
-    'GERMANY': {
-        'country_code': 'DE'
-    },
-    'GHANA': {
-        'country_code': 'GH'
-    },
-    'GIBRALTAR': {
-        'country_code': 'GI'
-    },
-    'GREECE': {
-        'country_code': 'GR'
-    },
-    'GREENLAND': {
-        'country_code': 'GL'
-    },
-    'GRENADA': {
-        'country_code': 'GD'
-    },
-    'GUADELOUPE': {
-        'country_code': 'GP'
-    },
-    'GUAM': {
-        'country_code': 'GU'
-    },
-    'GUATEMALA': {
-        'country_code': 'GT'
-    },
-    'GUERNSEY': {
-        'country_code': 'GG'
-    },
-    'GUINEA': {
-        'country_code': 'GN'
-    },
-    'GUINEA-BISSAU': {
-        'country_code': 'GW'
-    },
-    'GUYANA': {
-        'country_code': 'GY'
-    },
-    'HAITI': {
-        'country_code': 'HT'
-    },
-    'HEARD_ISLAND_AND_MCDONALD_ISLANDS': {
-        'country_code': 'HM'
-    },
-    'VATICAN_CITY_STATE': {
-        'country_code': 'VA'
-    },
-    'HONDURAS': {
-        'country_code': 'HN'
-    },
-    'HONG_KONG': {
-        'country_code': 'HK'
-    },
-    'HUNGARY': {
-        'country_code': 'HU'
-    },
-    'ICELAND': {
-        'country_code': 'IS'
-    },
-    'INDIA': {
-        'country_code': 'IN'
-    },
-    'INDONESIA': {
-        'country_code': 'ID'
-    },
-    'IRAN': {
-        'country_code': 'IR'
-    },
-    'IRAQ': {
-        'country_code': 'IQ'
-    },
-    'IRELAND': {
-        'country_code': 'IE'
-    },
-    'ISLE_OF_MAN': {
-        'country_code': 'IM'
-    },
-    'ISRAEL': {
-        'country_code': 'IL'
-    },
-    'ITALY': {
-        'country_code': 'IT'
-    },
-    'JAMAICA': {
-        'country_code': 'JM'
-    },
-    'JAPAN': {
-        'country_code': 'JP'
-    },
-    'JERSEY': {
-        'country_code': 'JE'
-    },
-    'JORDAN': {
-        'country_code': 'JO'
-    },
-    'KAZAKHSTAN': {
-        'country_code': 'KZ'
-    },
-    'KENYA': {
-        'country_code': 'KE'
-    },
-    'KIRIBATI': {
-        'country_code': 'KI'
-    },
-    'DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA': {
-        'country_code': 'KP'
-    },
-    'REPUBLIC_OF_KOREA': {
-        'country_code': 'KR'
-    },
-    'KUWAIT': {
-        'country_code': 'KW'
-    },
-    'KYRGYZSTAN': {
-        'country_code': 'KG'
-    },
-    'LAO': {
-        'country_code': 'LA'
-    },
-    'LATVIA': {
-        'country_code': 'LV'
-    },
-    'LEBANON': {
-        'country_code': 'LB'
-    },
-    'LESOTHO': {
-        'country_code': 'LS'
-    },
-    'LIBERIA': {
-        'country_code': 'LR'
-    },
-    'LIBYA': {
-        'country_code': 'LY'
-    },
-    'LIECHTENSTEIN': {
-        'country_code': 'LI'
-    },
-    'LITHUANIA': {
-        'country_code': 'LT'
-    },
-    'LUXEMBOURG': {
-        'country_code': 'LU'
-    },
-    'MACAO': {
-        'country_code': 'MO'
-    },
-    'MACEDONIA': {
-        'country_code': 'MK'
-    },
-    'MADAGASCAR': {
-        'country_code': 'MG'
-    },
-    'MALAWI': {
-        'country_code': 'MW'
-    },
-    'MALAYSIA': {
-        'country_code': 'MY'
-    },
-    'MALDIVES': {
-        'country_code': 'MV'
-    },
-    'MALI': {
-        'country_code': 'ML'
-    },
-    'MALTA': {
-        'country_code': 'MT'
-    },
-    'MARSHALL_ISLANDS': {
-        'country_code': 'MH'
-    },
-    'MARTINIQUE': {
-        'country_code': 'MQ'
-    },
-    'MAURITANIA': {
-        'country_code': 'MR'
-    },
-    'MAURITIUS': {
-        'country_code': 'MU'
-    },
-    'MAYOTTE': {
-        'country_code': 'YT'
-    },
-    'MEXICO': {
-        'country_code': 'MX'
-    },
-    'MICRONESIA': {
-        'country_code': 'FM'
-    },
-    'MOLDOVA': {
-        'country_code': 'MD'
-    },
-    'MONACO': {
-        'country_code': 'MC'
-    },
-    'MONGOLIA': {
-        'country_code': 'MN'
-    },
-    'MONTENEGRO': {
-        'country_code': 'ME'
-    },
-    'MONTSERRAT': {
-        'country_code': 'MS'
-    },
-    'MOROCCO': {
-        'country_code': 'MA'
-    },
-    'MOZAMBIQUE': {
-        'country_code': 'MZ'
-    },
-    'MYANMAR': {
-        'country_code': 'MM'
-    },
-    'NAMIBIA': {
-        'country_code': 'NA'
-    },
-    'NAURU': {
-        'country_code': 'NR'
-    },
-    'NEPAL': {
-        'country_code': 'NP'
-    },
-    'NETHERLANDS': {
-        'country_code': 'NL'
-    },
-    'NEW_CALEDONIA': {
-        'country_code': 'NC'
-    },
-    'NEW_ZEALAND': {
-        'country_code': 'NZ'
-    },
-    'NICARAGUA': {
-        'country_code': 'NI'
-    },
-    'NIGER': {
-        'country_code': 'NE'
-    },
-    'NIGERIA': {
-        'country_code': 'NG'
-    },
-    'NIUE': {
-        'country_code': 'NU'
-    },
-    'NORFOLK_ISLAND': {
-        'country_code': 'NF'
-    },
-    'NORTHERN_MARIANA_ISLANDS': {
-        'country_code': 'MP'
-    },
-    'NORWAY': {
-        'country_code': 'NO'
-    },
-    'OMAN': {
-        'country_code': 'OM'
-    },
-    'PAKISTAN': {
-        'country_code': 'PK'
-    },
-    'PALAU': {
-        'country_code': 'PW'
-    },
-    'PALESTINE': {
-        'country_code': 'PS'
-    },
-    'PANAMA': {
-        'country_code': 'PA'
-    },
-    'PAPUA_NEW_GUINEA': {
-        'country_code': 'PG'
-    },
-    'PARAGUAY': {
-        'country_code': 'PY'
-    },
-    'PERU': {
-        'country_code': 'PE'
-    },
-    'PHILIPPINES': {
-        'country_code': 'PH'
-    },
-    'PITCAIRN': {
-        'country_code': 'PN'
-    },
-    'POLAND': {
-        'country_code': 'PL'
-    },
-    'PORTUGAL': {
-        'country_code': 'PT'
-    },
-    'PUERTO_RICO': {
-        'country_code': 'PR'
-    },
-    'QATAR': {
-        'country_code': 'QA'
-    },
-    'RÉUNION': {
-        'country_code': 'RE'
-    },
-    'ROMANIA': {
-        'country_code': 'RO'
-    },
-    'RUSSIAN_FEDERATION': {
-        'country_code': 'RU'
-    },
-    'RWANDA': {
-        'country_code': 'RW'
-    },
-    'SAINT_BARTHELEMY': {
-        'country_code': 'BL'
-    },
-    'SAINT_KITTS_AND_NEVIS': {
-        'country_code': 'KN'
-    },
-    'SAINT_LUCIA': {
-        'country_code': 'LC'
-    },
-    'SAINT_MARTIN': {
-        'country_code': 'MF'
-    },
-    'SAINT_PIERRE_AND_MIQUELON': {
-        'country_code': 'PM'
-    },
-    'SAINT_VINCENT_AND_THE_GRENADINES': {
-        'country_code': 'VC'
-    },
-    'SAMOA': {
-        'country_code': 'WS'
-    },
-    'SAN_MARINO': {
-        'country_code': 'SM'
-    },
-    'SAO_TOME_AND_PRINCIPE': {
-        'country_code': 'ST'
-    },
-    'SAUDI_ARABIA': {
-        'country_code': 'SA'
-    },
-    'SENEGAL': {
-        'country_code': 'SN'
-    },
-    'SERBIA': {
-        'country_code': 'RS'
-    },
-    'SEYCHELLES': {
-        'country_code': 'SC'
-    },
-    'SIERRA_LEONE': {
-        'country_code': 'SL'
-    },
-    'SINGAPORE': {
-        'country_code': 'SG'
-    },
-    'SINT_MAARTEN': {
-        'country_code': 'SX'
-    },
-    'SLOVAKIA': {
-        'country_code': 'SK'
-    },
-    'SLOVENIA': {
-        'country_code': 'SI'
-    },
-    'SOLOMON_ISLANDS': {
-        'country_code': 'SB'
-    },
-    'SOMALIA': {
-        'country_code': 'SO'
-    },
-    'SOUTH_AFRICA': {
-        'country_code': 'ZA'
-    },
-    'SOUTH_GEORGIA': {
-        'country_code': 'GS'
-    },
-    'SOUTH_SUDAN': {
-        'country_code': 'SS'
-    },
-    'SPAIN': {
-        'country_code': 'ES'
-    },
-    'SRI_LANKA': {
-        'country_code': 'LK'
-    },
-    'SUDAN': {
-        'country_code': 'SD'
-    },
-    'SURINAME': {
-        'country_code': 'SR'
-    },
-    'SVALBARD_AND_JAN_MAYEN': {
-        'country_code': 'SJ'
-    },
-    'SWAZILAND': {
-        'country_code': 'SZ'
-    },
-    'SWEDEN': {
-        'country_code': 'SE'
-    },
-    'SWITZERLAND': {
-        'country_code': 'CH'
-    },
-    'SYRIAN_ARAB_REPUBLIC': {
-        'country_code': 'SY'
-    },
-    'TAIWAN': {
-        'country_code': 'TW'
-    },
-    'TAJIKISTAN': {
-        'country_code': 'TJ'
-    },
-    'TANZANIA': {
-        'country_code': 'TZ'
-    },
-    'THAILAND': {
-        'country_code': 'TH'
-    },
-    'TIMOR-LESTE': {
-        'country_code': 'TL'
-    },
-    'TOGO': {
-        'country_code': 'TG'
-    },
-    'TOKELAU': {
-        'country_code': 'TK'
-    },
-    'TONGA': {
-        'country_code': 'TO'
-    },
-    'TRINIDAD_AND_TOBAGO': {
-        'country_code': 'TT'
-    },
-    'TUNISIA': {
-        'country_code': 'TN'
-    },
-    'TURKEY': {
-        'country_code': 'TR'
-    },
-    'TURKMENISTAN': {
-        'country_code': 'TM'
-    },
-    'TURKS_AND_CAICOS_ISLANDS': {
-        'country_code': 'TC'
-    },
-    'TUVALU': {
-        'country_code': 'TV'
-    },
-    'UGANDA': {
-        'country_code': 'UG'
-    },
-    'UKRAINE': {
-        'country_code': 'UA'
-    },
-    'UNITED_ARAB_EMIRATES': {
-        'country_code': 'AE'
-    },
-    'UNITED_KINGDOM': {
-        'country_code': 'GB'
-    },
-    'UNITED_STATES': {
-        'country_code': 'US'
-    },
-    'UNITED_STATES_MINOR_OUTLYING_ISLANDS': {
-        'country_code': 'UM'
-    },
-    'URUGUAY': {
-        'country_code': 'UY'
-    },
-    'UZBEKISTAN': {
-        'country_code': 'UZ'
-    },
-    'VANUATU': {
-        'country_code': 'VU'
-    },
-    'VENEZUELA': {
-        'country_code': 'VE'
-    },
-    'VIETNAM': {
-        'country_code': 'VN'
-    },
-    'VIRGIN_ISLANDS_BRITISH': {
-        'country_code': 'VG'
-    },
-    'VIRGIN_ISLANDS_US': {
-        'country_code': 'VI'
-    },
-    'WALLIS_AND_FUTUNA': {
-        'country_code': 'WF'
-    },
-    'WESTERN_SAHARA': {
-        'country_code': 'EH'
-    },
-    'YEMEN': {
-        'country_code': 'YE'
-    },
-    'ZAMBIA': {
-        'country_code': 'ZM'
-    },
-    'ZIMBABWE': {
-        'country_code': 'ZW'
-    },
-    'NON_COUNTRY': {
-        'country_code': 'XX'
-    }
+    "AFGHANISTAN": {"country_code": "AF"},
+    "ALAND_ISLANDS": {"country_code": "AX"},
+    "ALBANIA": {"country_code": "AL"},
+    "ALGERIA": {"country_code": "DZ"},
+    "AMERICAN_SAMOA": {"country_code": "AS"},
+    "ANDORRA": {"country_code": "AD"},
+    "ANGOLA": {"country_code": "AO"},
+    "ANGUILLA": {"country_code": "AI"},
+    "ANTARCTICA": {"country_code": "AQ"},
+    "ANTIGUA_AND_BARBUDA": {"country_code": "AG"},
+    "ARGENTINA": {"country_code": "AR"},
+    "ARMENIA": {"country_code": "AM"},
+    "ARUBA": {"country_code": "AW"},
+    "AUSTRALIA": {"country_code": "AU"},
+    "AUSTRIA": {"country_code": "AT"},
+    "AZERBAIJAN": {"country_code": "AZ"},
+    "BAHAMAS": {"country_code": "BS"},
+    "BAHRAIN": {"country_code": "BH"},
+    "BANGLADESH": {"country_code": "BD"},
+    "BARBADOS": {"country_code": "BB"},
+    "BELARUS": {"country_code": "BY"},
+    "BELGIUM": {"country_code": "BE"},
+    "BELIZE": {"country_code": "BZ"},
+    "BENIN": {"country_code": "BJ"},
+    "BERMUDA": {"country_code": "BM"},
+    "BHUTAN": {"country_code": "BT"},
+    "BOLIVIA": {"country_code": "BO"},
+    "BONAIRE": {"country_code": "BQ"},
+    "BOSNIA_AND_HERZEGOVINA": {"country_code": "BA"},
+    "BOTSWANA": {"country_code": "BW"},
+    "BOUVET_ISLAND": {"country_code": "BV"},
+    "BRAZIL": {"country_code": "BR"},
+    "BRITISH_INDIAN_OCEAN_TERRITORY": {"country_code": "IO"},
+    "BRUNEI_DARUSSALAM": {"country_code": "BN"},
+    "BULGARIA": {"country_code": "BG"},
+    "BURKINA_FASO": {"country_code": "BF"},
+    "BURUNDI": {"country_code": "BI"},
+    "CAMBODIA": {"country_code": "KH"},
+    "CAMEROON": {"country_code": "CM"},
+    "CANADA": {"country_code": "CA"},
+    "CAPE_VERDE": {"country_code": "CV"},
+    "CAYMAN_ISLANDS": {"country_code": "KY"},
+    "CENTRAL_AFRICAN_REPUBLIC": {"country_code": "CF"},
+    "CHAD": {"country_code": "TD"},
+    "CHILE": {"country_code": "CL"},
+    "CHINA": {"country_code": "CN"},
+    "CHRISTMAS_ISLAND": {"country_code": "CX"},
+    "COCOS_ISLANDS": {"country_code": "CC"},
+    "COLOMBIA": {"country_code": "CO"},
+    "COMOROS": {"country_code": "KM"},
+    "CONGO": {"country_code": "CG"},
+    "DEMOCRATIC_REPUBLIC_CONGO": {"country_code": "CD"},
+    "COOK_ISLANDS": {"country_code": "CK"},
+    "COSTA_RICA": {"country_code": "CR"},
+    "COTE_D_IVOIRE": {"country_code": "CI"},
+    "CROATIA": {"country_code": "HR"},
+    "CUBA": {"country_code": "CU"},
+    "CURACAO": {"country_code": "CW"},
+    "CYPRUS": {"country_code": "CY"},
+    "CZECH_REPUBLIC": {"country_code": "CZ"},
+    "DENMARK": {"country_code": "DK"},
+    "DJIBOUTI": {"country_code": "DJ"},
+    "DOMINICA": {"country_code": "DM"},
+    "DOMINICAN_REPUBLIC": {"country_code": "DO"},
+    "ECUADOR": {"country_code": "EC"},
+    "EGYPT": {"country_code": "EG"},
+    "EL_SALVADOR": {"country_code": "SV"},
+    "EQUATORIAL_GUINEA": {"country_code": "GQ"},
+    "ERITREA": {"country_code": "ER"},
+    "ESTONIA": {"country_code": "EE"},
+    "ETHIOPIA": {"country_code": "ET"},
+    "FALKLAND_ISLANDS_(MALVINAS)": {"country_code": "FK"},
+    "FAROE_ISLANDS": {"country_code": "FO"},
+    "FIJI": {"country_code": "FJ"},
+    "FINLAND": {"country_code": "FI"},
+    "FRANCE": {"country_code": "FR"},
+    "FRENCH_GUIANA": {"country_code": "GF"},
+    "FRENCH_POLYNESIA": {"country_code": "PF"},
+    "FRENCH_SOUTHERN_TERRITORIES": {"country_code": "TF"},
+    "GABON": {"country_code": "GA"},
+    "GAMBIA": {"country_code": "GM"},
+    "GEORGIA": {"country_code": "GE"},
+    "GERMANY": {"country_code": "DE"},
+    "GHANA": {"country_code": "GH"},
+    "GIBRALTAR": {"country_code": "GI"},
+    "GREECE": {"country_code": "GR"},
+    "GREENLAND": {"country_code": "GL"},
+    "GRENADA": {"country_code": "GD"},
+    "GUADELOUPE": {"country_code": "GP"},
+    "GUAM": {"country_code": "GU"},
+    "GUATEMALA": {"country_code": "GT"},
+    "GUERNSEY": {"country_code": "GG"},
+    "GUINEA": {"country_code": "GN"},
+    "GUINEA-BISSAU": {"country_code": "GW"},
+    "GUYANA": {"country_code": "GY"},
+    "HAITI": {"country_code": "HT"},
+    "HEARD_ISLAND_AND_MCDONALD_ISLANDS": {"country_code": "HM"},
+    "VATICAN_CITY_STATE": {"country_code": "VA"},
+    "HONDURAS": {"country_code": "HN"},
+    "HONG_KONG": {"country_code": "HK"},
+    "HUNGARY": {"country_code": "HU"},
+    "ICELAND": {"country_code": "IS"},
+    "INDIA": {"country_code": "IN"},
+    "INDONESIA": {"country_code": "ID"},
+    "IRAN": {"country_code": "IR"},
+    "IRAQ": {"country_code": "IQ"},
+    "IRELAND": {"country_code": "IE"},
+    "ISLE_OF_MAN": {"country_code": "IM"},
+    "ISRAEL": {"country_code": "IL"},
+    "ITALY": {"country_code": "IT"},
+    "JAMAICA": {"country_code": "JM"},
+    "JAPAN": {"country_code": "JP"},
+    "JERSEY": {"country_code": "JE"},
+    "JORDAN": {"country_code": "JO"},
+    "KAZAKHSTAN": {"country_code": "KZ"},
+    "KENYA": {"country_code": "KE"},
+    "KIRIBATI": {"country_code": "KI"},
+    "DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA": {"country_code": "KP"},
+    "REPUBLIC_OF_KOREA": {"country_code": "KR"},
+    "KUWAIT": {"country_code": "KW"},
+    "KYRGYZSTAN": {"country_code": "KG"},
+    "LAO": {"country_code": "LA"},
+    "LATVIA": {"country_code": "LV"},
+    "LEBANON": {"country_code": "LB"},
+    "LESOTHO": {"country_code": "LS"},
+    "LIBERIA": {"country_code": "LR"},
+    "LIBYA": {"country_code": "LY"},
+    "LIECHTENSTEIN": {"country_code": "LI"},
+    "LITHUANIA": {"country_code": "LT"},
+    "LUXEMBOURG": {"country_code": "LU"},
+    "MACAO": {"country_code": "MO"},
+    "MACEDONIA": {"country_code": "MK"},
+    "MADAGASCAR": {"country_code": "MG"},
+    "MALAWI": {"country_code": "MW"},
+    "MALAYSIA": {"country_code": "MY"},
+    "MALDIVES": {"country_code": "MV"},
+    "MALI": {"country_code": "ML"},
+    "MALTA": {"country_code": "MT"},
+    "MARSHALL_ISLANDS": {"country_code": "MH"},
+    "MARTINIQUE": {"country_code": "MQ"},
+    "MAURITANIA": {"country_code": "MR"},
+    "MAURITIUS": {"country_code": "MU"},
+    "MAYOTTE": {"country_code": "YT"},
+    "MEXICO": {"country_code": "MX"},
+    "MICRONESIA": {"country_code": "FM"},
+    "MOLDOVA": {"country_code": "MD"},
+    "MONACO": {"country_code": "MC"},
+    "MONGOLIA": {"country_code": "MN"},
+    "MONTENEGRO": {"country_code": "ME"},
+    "MONTSERRAT": {"country_code": "MS"},
+    "MOROCCO": {"country_code": "MA"},
+    "MOZAMBIQUE": {"country_code": "MZ"},
+    "MYANMAR": {"country_code": "MM"},
+    "NAMIBIA": {"country_code": "NA"},
+    "NAURU": {"country_code": "NR"},
+    "NEPAL": {"country_code": "NP"},
+    "NETHERLANDS": {"country_code": "NL"},
+    "NEW_CALEDONIA": {"country_code": "NC"},
+    "NEW_ZEALAND": {"country_code": "NZ"},
+    "NICARAGUA": {"country_code": "NI"},
+    "NIGER": {"country_code": "NE"},
+    "NIGERIA": {"country_code": "NG"},
+    "NIUE": {"country_code": "NU"},
+    "NORFOLK_ISLAND": {"country_code": "NF"},
+    "NORTHERN_MARIANA_ISLANDS": {"country_code": "MP"},
+    "NORWAY": {"country_code": "NO"},
+    "OMAN": {"country_code": "OM"},
+    "PAKISTAN": {"country_code": "PK"},
+    "PALAU": {"country_code": "PW"},
+    "PALESTINE": {"country_code": "PS"},
+    "PANAMA": {"country_code": "PA"},
+    "PAPUA_NEW_GUINEA": {"country_code": "PG"},
+    "PARAGUAY": {"country_code": "PY"},
+    "PERU": {"country_code": "PE"},
+    "PHILIPPINES": {"country_code": "PH"},
+    "PITCAIRN": {"country_code": "PN"},
+    "POLAND": {"country_code": "PL"},
+    "PORTUGAL": {"country_code": "PT"},
+    "PUERTO_RICO": {"country_code": "PR"},
+    "QATAR": {"country_code": "QA"},
+    "RÉUNION": {"country_code": "RE"},
+    "ROMANIA": {"country_code": "RO"},
+    "RUSSIAN_FEDERATION": {"country_code": "RU"},
+    "RWANDA": {"country_code": "RW"},
+    "SAINT_BARTHELEMY": {"country_code": "BL"},
+    "SAINT_KITTS_AND_NEVIS": {"country_code": "KN"},
+    "SAINT_LUCIA": {"country_code": "LC"},
+    "SAINT_MARTIN": {"country_code": "MF"},
+    "SAINT_PIERRE_AND_MIQUELON": {"country_code": "PM"},
+    "SAINT_VINCENT_AND_THE_GRENADINES": {"country_code": "VC"},
+    "SAMOA": {"country_code": "WS"},
+    "SAN_MARINO": {"country_code": "SM"},
+    "SAO_TOME_AND_PRINCIPE": {"country_code": "ST"},
+    "SAUDI_ARABIA": {"country_code": "SA"},
+    "SENEGAL": {"country_code": "SN"},
+    "SERBIA": {"country_code": "RS"},
+    "SEYCHELLES": {"country_code": "SC"},
+    "SIERRA_LEONE": {"country_code": "SL"},
+    "SINGAPORE": {"country_code": "SG"},
+    "SINT_MAARTEN": {"country_code": "SX"},
+    "SLOVAKIA": {"country_code": "SK"},
+    "SLOVENIA": {"country_code": "SI"},
+    "SOLOMON_ISLANDS": {"country_code": "SB"},
+    "SOMALIA": {"country_code": "SO"},
+    "SOUTH_AFRICA": {"country_code": "ZA"},
+    "SOUTH_GEORGIA": {"country_code": "GS"},
+    "SOUTH_SUDAN": {"country_code": "SS"},
+    "SPAIN": {"country_code": "ES"},
+    "SRI_LANKA": {"country_code": "LK"},
+    "SUDAN": {"country_code": "SD"},
+    "SURINAME": {"country_code": "SR"},
+    "SVALBARD_AND_JAN_MAYEN": {"country_code": "SJ"},
+    "SWAZILAND": {"country_code": "SZ"},
+    "SWEDEN": {"country_code": "SE"},
+    "SWITZERLAND": {"country_code": "CH"},
+    "SYRIAN_ARAB_REPUBLIC": {"country_code": "SY"},
+    "TAIWAN": {"country_code": "TW"},
+    "TAJIKISTAN": {"country_code": "TJ"},
+    "TANZANIA": {"country_code": "TZ"},
+    "THAILAND": {"country_code": "TH"},
+    "TIMOR-LESTE": {"country_code": "TL"},
+    "TOGO": {"country_code": "TG"},
+    "TOKELAU": {"country_code": "TK"},
+    "TONGA": {"country_code": "TO"},
+    "TRINIDAD_AND_TOBAGO": {"country_code": "TT"},
+    "TUNISIA": {"country_code": "TN"},
+    "TURKEY": {"country_code": "TR"},
+    "TURKMENISTAN": {"country_code": "TM"},
+    "TURKS_AND_CAICOS_ISLANDS": {"country_code": "TC"},
+    "TUVALU": {"country_code": "TV"},
+    "UGANDA": {"country_code": "UG"},
+    "UKRAINE": {"country_code": "UA"},
+    "UNITED_ARAB_EMIRATES": {"country_code": "AE"},
+    "UNITED_KINGDOM": {"country_code": "GB"},
+    "UNITED_STATES": {"country_code": "US"},
+    "UNITED_STATES_MINOR_OUTLYING_ISLANDS": {"country_code": "UM"},
+    "URUGUAY": {"country_code": "UY"},
+    "UZBEKISTAN": {"country_code": "UZ"},
+    "VANUATU": {"country_code": "VU"},
+    "VENEZUELA": {"country_code": "VE"},
+    "VIETNAM": {"country_code": "VN"},
+    "VIRGIN_ISLANDS_BRITISH": {"country_code": "VG"},
+    "VIRGIN_ISLANDS_US": {"country_code": "VI"},
+    "WALLIS_AND_FUTUNA": {"country_code": "WF"},
+    "WESTERN_SAHARA": {"country_code": "EH"},
+    "YEMEN": {"country_code": "YE"},
+    "ZAMBIA": {"country_code": "ZM"},
+    "ZIMBABWE": {"country_code": "ZW"},
+    "NON_COUNTRY": {"country_code": "XX"},
 }
 
 ALL_CHANNELS_2G = {
@@ -1366,7 +854,7 @@
     11: {20, 40},
     12: {20, 40},
     13: {20, 40},
-    14: {20}
+    14: {20},
 }
 
 ALL_CHANNELS_5G = {
@@ -1394,7 +882,7 @@
     153: {20, 40, 80},
     157: {20, 40, 80},
     161: {20, 40, 80},
-    165: {20}
+    165: {20},
 }
 
 ALL_CHANNELS = {**ALL_CHANNELS_2G, **ALL_CHANNELS_5G}
@@ -1403,6 +891,7 @@
 @unique
 class WnmFeature(Enum):
     """Wireless Network Management (AKA 802.11v) features hostapd supports."""
+
     TIME_ADVERTISEMENT = auto()
     WNM_SLEEP_MODE = auto()
     BSS_TRANSITION_MANAGEMENT = auto()
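
The dictionaries in this constants module are building blocks that get merged into a
hostapd configuration. A minimal, illustrative sketch (not part of this change) of how
they are typically combined, using only names defined above:

    from antlion.controllers.ap_lib import hostapd_constants

    # WMM defaults for 11a/g/n/ac plus mandatory admission control for the
    # voice access category.
    wmm_settings = {
        **hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
        **hostapd_constants.WMM_ACM_VO,
    }

    # ALL_CHANNELS maps a channel number to the set of bandwidths (MHz) it supports.
    assert 80 in hostapd_constants.ALL_CHANNELS[157]
    assert hostapd_constants.ALL_CHANNELS[165] == {20}
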
diff --git a/src/antlion/controllers/ap_lib/hostapd_security.py b/src/antlion/controllers/ap_lib/hostapd_security.py
index 372ca44..69d5c2f 100644
--- a/src/antlion/controllers/ap_lib/hostapd_security.py
+++ b/src/antlion/controllers/ap_lib/hostapd_security.py
@@ -15,24 +15,29 @@
 import collections
 import string
 
+from typing import Dict, Optional, Union
+
 from antlion.controllers.ap_lib import hostapd_constants
 
 
 class Security(object):
     """The Security class for hostapd representing some of the security
-       settings that are allowed in hostapd.  If needed more can be added.
+    settings that are allowed in hostapd.  If needed more can be added.
     """
-    def __init__(self,
-                 security_mode=None,
-                 password=None,
-                 wpa_cipher=hostapd_constants.WPA_DEFAULT_CIPHER,
-                 wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-                 wpa_group_rekey=hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
-                 wpa_strict_rekey=hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
-                 wep_default_key=hostapd_constants.WEP_DEFAULT_KEY,
-                 radius_server_ip=None,
-                 radius_server_port=None,
-                 radius_server_secret=None):
+
+    def __init__(
+        self,
+        security_mode: Optional[str] = None,
+        password: Optional[str] = None,
+        wpa_cipher: str = hostapd_constants.WPA_DEFAULT_CIPHER,
+        wpa2_cipher: str = hostapd_constants.WPA2_DEFAULT_CIPER,
+        wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
+        wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
+        wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
+        radius_server_ip: Optional[str] = None,
+        radius_server_port: Optional[int] = None,
+        radius_server_secret: Optional[str] = None,
+    ) -> None:
         """Gather all of the security settings for WPA-PSK.  This could be
            expanded later.
 
@@ -70,63 +75,80 @@
         self.radius_server_port = radius_server_port
         self.radius_server_secret = radius_server_secret
         self.security_mode = hostapd_constants.SECURITY_STRING_TO_SECURITY_MODE_INT.get(
-            security_mode, None)
+            security_mode, None
+        )
         if password:
             if self.security_mode == hostapd_constants.WEP:
                 if len(password) in hostapd_constants.WEP_STR_LENGTH:
                     self.password = '"%s"' % password
                 elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
-                        c in string.hexdigits for c in password):
+                    c in string.hexdigits for c in password
+                ):
                     self.password = password
                 else:
                     raise ValueError(
-                        'WEP key must be a hex string of %s characters' %
-                        hostapd_constants.WEP_HEX_LENGTH)
+                        "WEP key must be a hex string of %s characters"
+                        % hostapd_constants.WEP_HEX_LENGTH
+                    )
             else:
-                if len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH or len(
-                        password) > hostapd_constants.MAX_WPA_PSK_LENGTH:
+                if (
+                    len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
+                    or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
+                ):
                     raise ValueError(
-                        'Password must be a minumum of %s characters and a maximum of %s'
-                        % (hostapd_constants.MIN_WPA_PSK_LENGTH,
-                           hostapd_constants.MAX_WPA_PSK_LENGTH))
+                        "Password must be a minumum of %s characters and a maximum of %s"
+                        % (
+                            hostapd_constants.MIN_WPA_PSK_LENGTH,
+                            hostapd_constants.MAX_WPA_PSK_LENGTH,
+                        )
+                    )
                 else:
                     self.password = password
 
-    def generate_dict(self):
+    def generate_dict(self) -> Dict[str, Union[str, int]]:
         """Returns: an ordered dictionary of settings"""
         settings = collections.OrderedDict()
         if self.security_mode is not None:
             if self.security_mode == hostapd_constants.WEP:
-                settings['wep_default_key'] = self.wep_default_key
-                settings['wep_key' + str(self.wep_default_key)] = self.password
+                settings["wep_default_key"] = self.wep_default_key
+                settings["wep_key" + str(self.wep_default_key)] = self.password
             elif self.security_mode == hostapd_constants.ENT:
-                settings['auth_server_addr'] = self.radius_server_ip
-                settings['auth_server_port'] = self.radius_server_port
-                settings[
-                    'auth_server_shared_secret'] = self.radius_server_secret
-                settings['wpa_key_mgmt'] = hostapd_constants.ENT_KEY_MGMT
-                settings['ieee8021x'] = hostapd_constants.IEEE8021X
-                settings['wpa'] = hostapd_constants.WPA2
+                settings["auth_server_addr"] = self.radius_server_ip
+                settings["auth_server_port"] = self.radius_server_port
+                settings["auth_server_shared_secret"] = self.radius_server_secret
+                settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
+                settings["ieee8021x"] = hostapd_constants.IEEE8021X
+                settings["wpa"] = hostapd_constants.WPA2
             else:
-                settings['wpa'] = self.security_mode
+                settings["wpa"] = self.security_mode
                 if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
-                    settings['wpa_psk'] = self.password
+                    settings["wpa_psk"] = self.password
                 else:
-                    settings['wpa_passphrase'] = self.password
+                    settings["wpa_passphrase"] = self.password
                 # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
-                if self.security_mode == hostapd_constants.WPA1 or self.security_mode == hostapd_constants.MIXED:
-                    settings['wpa_pairwise'] = self.wpa_cipher
+                if (
+                    self.security_mode == hostapd_constants.WPA1
+                    or self.security_mode == hostapd_constants.MIXED
+                ):
+                    settings["wpa_pairwise"] = self.wpa_cipher
                 # For wpa/wpa2, wpa2, wpa3, and wpa2/wpa3, and wpa/wpa2, wpa3, add rsn_pairwise
-                if self.security_mode == hostapd_constants.WPA2 or self.security_mode == hostapd_constants.MIXED:
-                    settings['rsn_pairwise'] = self.wpa2_cipher
+                if (
+                    self.security_mode == hostapd_constants.WPA2
+                    or self.security_mode == hostapd_constants.MIXED
+                ):
+                    settings["rsn_pairwise"] = self.wpa2_cipher
                 # Add wpa_key_mgmt based on security mode string
-                if self.security_mode_string in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT:
+                if (
+                    self.security_mode_string
+                    in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT
+                ):
                     settings[
-                        'wpa_key_mgmt'] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[
-                            self.security_mode_string]
+                        "wpa_key_mgmt"
+                    ] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[
+                        self.security_mode_string
+                    ]
                 if self.wpa_group_rekey:
-                    settings['wpa_group_rekey'] = self.wpa_group_rekey
+                    settings["wpa_group_rekey"] = self.wpa_group_rekey
                 if self.wpa_strict_rekey:
-                    settings[
-                        'wpa_strict_rekey'] = hostapd_constants.WPA_STRICT_REKEY
+                    settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY
         return settings
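
A hedged usage sketch of the Security class above. The "wpa2" mode string and the
passphrase length are assumptions, since SECURITY_STRING_TO_SECURITY_MODE_INT and the
WPA PSK length bounds are defined elsewhere in hostapd_constants:

    from antlion.controllers.ap_lib.hostapd_security import Security

    # Assumes "wpa2" is a key of SECURITY_STRING_TO_SECURITY_MODE_INT and that the
    # passphrase length falls between MIN_WPA_PSK_LENGTH and MAX_WPA_PSK_LENGTH.
    security = Security(security_mode="wpa2", password="correct-horse-battery")
    settings = security.generate_dict()
    # For a passphrase shorter than MAX_WPA_PSK_LENGTH this yields, among others:
    #   settings["wpa"]            -> the numeric WPA2 mode
    #   settings["wpa_passphrase"] -> "correct-horse-battery"
    #   settings["rsn_pairwise"]   -> the WPA2 cipher (wpa2_cipher default)
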
diff --git a/src/antlion/controllers/ap_lib/hostapd_utils.py b/src/antlion/controllers/ap_lib/hostapd_utils.py
index 3387ed1..82331bf 100644
--- a/src/antlion/controllers/ap_lib/hostapd_utils.py
+++ b/src/antlion/controllers/ap_lib/hostapd_utils.py
@@ -48,9 +48,9 @@
         valid_interfaces: list of valid interface names
     """
     if not interface:
-        raise ValueError('Required wlan interface is missing.')
+        raise ValueError("Required wlan interface is missing.")
     if interface not in valid_interfaces:
-        raise ValueError('Invalid interface name was passed: %s' % interface)
+        raise ValueError("Invalid interface name was passed: %s" % interface)
 
 
 def verify_security_mode(security_profile, valid_security_modes):
@@ -63,12 +63,13 @@
     """
     if security_profile is None:
         if None not in valid_security_modes:
-            raise ValueError('Open security is not allowed for this profile.')
+            raise ValueError("Open security is not allowed for this profile.")
     elif security_profile.security_mode not in valid_security_modes:
         raise ValueError(
-            'Invalid Security Mode: %s. '
-            'Valid Security Modes for this profile: %s.' %
-            (security_profile.security_mode, valid_security_modes))
+            "Invalid Security Mode: %s. "
+            "Valid Security Modes for this profile: %s."
+            % (security_profile.security_mode, valid_security_modes)
+        )
 
 
 def verify_cipher(security_profile, valid_ciphers):
@@ -79,17 +80,20 @@
         valid_ciphers: a list of valid ciphers for a profile.
     """
     if security_profile is None:
-        raise ValueError('Security mode is open.')
+        raise ValueError("Security mode is open.")
     elif security_profile.security_mode == hostapd_constants.WPA1:
         if security_profile.wpa_cipher not in valid_ciphers:
-            raise ValueError('Invalid WPA Cipher: %s. '
-                             'Valid WPA Ciphers for this profile: %s' %
-                             (security_profile.wpa_cipher, valid_ciphers))
+            raise ValueError(
+                "Invalid WPA Cipher: %s. "
+                "Valid WPA Ciphers for this profile: %s"
+                % (security_profile.wpa_cipher, valid_ciphers)
+            )
     elif security_profile.security_mode == hostapd_constants.WPA2:
         if security_profile.wpa2_cipher not in valid_ciphers:
-            raise ValueError('Invalid WPA2 Cipher: %s. '
-                             'Valid WPA2 Ciphers for this profile: %s' %
-                             (security_profile.wpa2_cipher, valid_ciphers))
+            raise ValueError(
+                "Invalid WPA2 Cipher: %s. "
+                "Valid WPA2 Ciphers for this profile: %s"
+                % (security_profile.wpa2_cipher, valid_ciphers)
+            )
     else:
-        raise ValueError('Invalid Security Mode: %s' %
-                         security_profile.security_mode)
+        raise ValueError("Invalid Security Mode: %s" % security_profile.security_mode)
diff --git a/src/antlion/controllers/ap_lib/radio_measurement.py b/src/antlion/controllers/ap_lib/radio_measurement.py
index 254adc5..5c7f2e0 100644
--- a/src/antlion/controllers/ap_lib/radio_measurement.py
+++ b/src/antlion/controllers/ap_lib/radio_measurement.py
@@ -23,6 +23,7 @@
 
     See IEEE 802.11-2020 Figure 9-172.
     """
+
     NOT_REACHABLE = 1
     UNKNOWN = 2
     REACHABLE = 3
@@ -34,11 +35,13 @@
     See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4.
     """
 
-    def __init__(self,
-                 spectrum_management: bool = False,
-                 qos: bool = False,
-                 apsd: bool = False,
-                 radio_measurement: bool = False):
+    def __init__(
+        self,
+        spectrum_management: bool = False,
+        qos: bool = False,
+        apsd: bool = False,
+        radio_measurement: bool = False,
+    ):
         """Create a capabilities object.
 
         Args:
@@ -54,10 +57,12 @@
 
     def __index__(self) -> int:
         """Convert to numeric representation of the field's bits."""
-        return self.spectrum_management << 5 \
-            | self.qos << 4 \
-            | self.apsd << 3 \
+        return (
+            self.spectrum_management << 5
+            | self.qos << 4
+            | self.apsd << 3
             | self.radio_measurement << 2
+        )
 
     @property
     def spectrum_management(self) -> bool:
@@ -83,16 +88,17 @@
     neighbor report element. See IEEE 802.11-2020 Figure 9-337.
     """
 
-    def __init__(self,
-                 ap_reachability: ApReachability = ApReachability.UNKNOWN,
-                 security: bool = False,
-                 key_scope: bool = False,
-                 capabilities:
-                 BssidInformationCapabilities = BssidInformationCapabilities(),
-                 mobility_domain: bool = False,
-                 high_throughput: bool = False,
-                 very_high_throughput: bool = False,
-                 ftm: bool = False):
+    def __init__(
+        self,
+        ap_reachability: ApReachability = ApReachability.UNKNOWN,
+        security: bool = False,
+        key_scope: bool = False,
+        capabilities: BssidInformationCapabilities = BssidInformationCapabilities(),
+        mobility_domain: bool = False,
+        high_throughput: bool = False,
+        very_high_throughput: bool = False,
+        ftm: bool = False,
+    ):
         """Create a BSSID Information object for a neighboring AP.
 
         Args:
@@ -128,14 +134,16 @@
 
     def __index__(self) -> int:
         """Convert to numeric representation of the field's bits."""
-        return self._ap_reachability << 30 \
-            | self.security << 29 \
-            | self.key_scope << 28 \
-            | int(self.capabilities) << 22 \
-            | self.mobility_domain << 21 \
-            | self.high_throughput << 20 \
-            | self.very_high_throughput << 19 \
+        return (
+            self._ap_reachability << 30
+            | self.security << 29
+            | self.key_scope << 28
+            | int(self.capabilities) << 22
+            | self.mobility_domain << 21
+            | self.high_throughput << 20
+            | self.very_high_throughput << 19
             | self.ftm << 18
+        )
 
     @property
     def security(self) -> bool:
@@ -169,6 +177,7 @@
 @unique
 class PhyType(IntEnum):
     """PHY type values, see dot11PhyType in 802.11-2020 Annex C."""
+
     DSSS = 2
     OFDM = 4
     HRDSS = 5
@@ -188,8 +197,14 @@
     See IEEE 802.11-2020 9.4.2.36.
     """
 
-    def __init__(self, bssid: str, bssid_information: BssidInformation,
-                 operating_class: int, channel_number: int, phy_type: PhyType):
+    def __init__(
+        self,
+        bssid: str,
+        bssid_information: BssidInformation,
+        operating_class: int,
+        channel_number: int,
+        phy_type: PhyType,
+    ):
         """Create a neighbor report element.
 
         Args:
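
A worked example (illustrative, not part of the diff) of the bit packing performed by
the __index__ methods in this file:

    from antlion.controllers.ap_lib.radio_measurement import (
        BssidInformation,
        BssidInformationCapabilities,
    )

    caps = BssidInformationCapabilities(qos=True, radio_measurement=True)
    assert int(caps) == (1 << 4) | (1 << 2)  # QoS is bit 4, radio measurement is bit 2

    # In BssidInformation, AP reachability occupies bits 30-31, security bit 29,
    # and the capabilities subfield starts at bit 22.
    info = BssidInformation(security=True, capabilities=caps)
    packed = int(info)
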
diff --git a/src/antlion/controllers/ap_lib/radvd.py b/src/antlion/controllers/ap_lib/radvd.py
index 9761c44..216ad0e 100644
--- a/src/antlion/controllers/ap_lib/radvd.py
+++ b/src/antlion/controllers/ap_lib/radvd.py
@@ -17,6 +17,9 @@
 import tempfile
 import time
 
+from typing import Any, Optional
+
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
 from antlion.controllers.utils_lib.commands import shell
 from antlion.libs.proc import job
 
@@ -35,38 +38,45 @@
     Attributes:
         config: The radvd configuration that is being used.
     """
-    def __init__(self, runner, interface, working_dir=None, radvd_binary=None):
+
+    def __init__(
+        self,
+        runner: Any,
+        interface: str,
+        working_dir: Optional[str] = None,
+        radvd_binary: Optional[str] = None,
+    ) -> None:
         """
         Args:
             runner: Object that has run_async and run methods for executing
                     shell commands (e.g. connection.SshConnection)
-            interface: string, The name of the interface to use (eg. wlan0).
-            working_dir: The directory to work out of.
+            interface: Name of the interface to use (e.g. wlan0).
+            working_dir: Directory to work out of.
             radvd_binary: Location of the radvd binary
         """
         if not radvd_binary:
-            logging.debug('No radvd binary specified.  '
-                          'Assuming radvd is in the path.')
-            radvd_binary = 'radvd'
+            logging.debug(
+                "No radvd binary specified.  " "Assuming radvd is in the path."
+            )
+            radvd_binary = "radvd"
         else:
-            logging.debug('Using radvd binary located at %s' % radvd_binary)
+            logging.debug(f"Using radvd binary located at {radvd_binary}")
         if working_dir is None and runner == job.run:
             working_dir = tempfile.gettempdir()
         else:
-            working_dir = '/tmp'
+            working_dir = "/tmp"
         self._radvd_binary = radvd_binary
         self._runner = runner
         self._interface = interface
         self._working_dir = working_dir
-        self.config = None
+        self.config: Optional[RadvdConfig] = None
         self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = '%s/radvd-%s.log' % (working_dir, self._interface)
-        self._config_file = '%s/radvd-%s.conf' % (working_dir, self._interface)
-        self._pid_file = '%s/radvd-%s.pid' % (working_dir, self._interface)
-        self._ps_identifier = '%s.*%s' % (self._radvd_binary,
-                                          self._config_file)
+        self._log_file = f"{working_dir}/radvd-{self._interface}.log"
+        self._config_file = f"{working_dir}/radvd-{self._interface}.conf"
+        self._pid_file = f"{working_dir}/radvd-{self._interface}.pid"
+        self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}"
 
-    def start(self, config, timeout=60):
+    def start(self, config: RadvdConfig, timeout: int = 60) -> None:
         """Starts radvd
 
         Starts the radvd daemon and runs it in the background.
@@ -91,11 +101,12 @@
         self._shell.delete_file(self._config_file)
         self._write_configs(self.config)
 
-        radvd_command = '%s -C %s -p %s -m logfile -d 5 -l %s' % (
-            self._radvd_binary, shlex.quote(self._config_file),
-            shlex.quote(self._pid_file), self._log_file)
-        job_str = '%s > "%s" 2>&1' % (radvd_command, self._log_file)
-        self._runner.run_async(job_str)
+        command = (
+            f"{self._radvd_binary} -C {shlex.quote(self._config_file)} "
+            f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 "
+            f'-l {self._log_file} > "{self._log_file}" 2>&1'
+        )
+        self._runner.run_async(command)
 
         try:
             self._wait_for_process(timeout=timeout)
@@ -114,7 +125,7 @@
         """
         return self._shell.is_alive(self._ps_identifier)
 
-    def pull_logs(self):
+    def pull_logs(self) -> str:
         """Pulls the log files from where radvd is running.
 
         Returns:
@@ -123,7 +134,7 @@
         # TODO: Auto pulling of logs when stop is called.
         return self._shell.read_file(self._log_file)
 
-    def _wait_for_process(self, timeout=60):
+    def _wait_for_process(self, timeout: int = 60) -> None:
         """Waits for the process to come up.
 
         Waits until the radvd process is found running, or there is
@@ -138,7 +149,7 @@
             self._scan_for_errors(False)
         self._scan_for_errors(True)
 
-    def _scan_for_errors(self, should_be_up):
+    def _scan_for_errors(self, should_be_up: bool) -> None:
         """Scans the radvd log for any errors.
 
         Args:
@@ -152,13 +163,13 @@
         # Store this so that all other errors have priority.
         is_dead = not self.is_alive()
 
-        exited_prematurely = self._shell.search_file('Exiting', self._log_file)
+        exited_prematurely = self._shell.search_file("Exiting", self._log_file)
         if exited_prematurely:
-            raise Error('Radvd exited prematurely.', self)
+            raise Error("Radvd exited prematurely.", self)
         if should_be_up and is_dead:
-            raise Error('Radvd failed to start', self)
+            raise Error("Radvd failed to start", self)
 
-    def _write_configs(self, config):
+    def _write_configs(self, config: RadvdConfig) -> None:
         """Writes the configs to the radvd config file.
 
         Args:
@@ -166,38 +177,39 @@
         """
         self._shell.delete_file(self._config_file)
         conf = config.package_configs()
-        lines = ['interface %s {' % self._interface]
-        for (interface_option_key,
-             interface_option) in conf['interface_options'].items():
-            lines.append('\t%s %s;' %
-                         (str(interface_option_key), str(interface_option)))
-        lines.append('\tprefix %s' % conf['prefix'])
-        lines.append('\t{')
-        for prefix_option in conf['prefix_options'].items():
-            lines.append('\t\t%s;' % ' '.join(map(str, prefix_option)))
-        lines.append('\t};')
-        if conf['clients']:
-            lines.append('\tclients')
-            lines.append('\t{')
-            for client in conf['clients']:
-                lines.append('\t\t%s;' % client)
-            lines.append('\t};')
-        if conf['route']:
-            lines.append('\troute %s {' % conf['route'])
-            for route_option in conf['route_options'].items():
-                lines.append('\t\t%s;' % ' '.join(map(str, route_option)))
-            lines.append('\t};')
-        if conf['rdnss']:
-            lines.append('\tRDNSS %s {' %
-                         ' '.join([str(elem) for elem in conf['rdnss']]))
-            for rdnss_option in conf['rdnss_options'].items():
-                lines.append('\t\t%s;' % ' '.join(map(str, rdnss_option)))
-            lines.append('\t};')
-        lines.append('};')
-        output_config = '\n'.join(lines)
-        logging.info('Writing %s' % self._config_file)
-        logging.debug('******************Start*******************')
-        logging.debug('\n%s' % output_config)
-        logging.debug('*******************End********************')
+        lines = ["interface %s {" % self._interface]
+        for interface_option_key, interface_option in conf["interface_options"].items():
+            lines.append(
+                "\t%s %s;" % (str(interface_option_key), str(interface_option))
+            )
+        lines.append("\tprefix %s" % conf["prefix"])
+        lines.append("\t{")
+        for prefix_option in conf["prefix_options"].items():
+            lines.append("\t\t%s;" % " ".join(map(str, prefix_option)))
+        lines.append("\t};")
+        if conf["clients"]:
+            lines.append("\tclients")
+            lines.append("\t{")
+            for client in conf["clients"]:
+                lines.append("\t\t%s;" % client)
+            lines.append("\t};")
+        if conf["route"]:
+            lines.append("\troute %s {" % conf["route"])
+            for route_option in conf["route_options"].items():
+                lines.append("\t\t%s;" % " ".join(map(str, route_option)))
+            lines.append("\t};")
+        if conf["rdnss"]:
+            lines.append(
+                "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]])
+            )
+            for rdnss_option in conf["rdnss_options"].items():
+                lines.append("\t\t%s;" % " ".join(map(str, rdnss_option)))
+            lines.append("\t};")
+        lines.append("};")
+        output_config = "\n".join(lines)
+        logging.info("Writing %s" % self._config_file)
+        logging.debug("******************Start*******************")
+        logging.debug("\n%s" % output_config)
+        logging.debug("*******************End********************")
 
         self._shell.write_file(self._config_file, output_config)
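
For reference, a sketch (not part of this change) of the text that _write_configs assembles for an all-defaults RadvdConfig; the interface name wlan0 stands in for self._interface and is an assumed placeholder.

# Sketch: the string _write_configs() joins together for a default RadvdConfig.
# "wlan0" is an assumed placeholder for self._interface.
expected = (
    "interface wlan0 {\n"
    "\tAdvSendAdvert on;\n"
    "\tprefix fd00::/64\n"
    "\t{\n"
    "\t\tAdvOnLink on;\n"
    "\t\tAdvAutonomous on;\n"
    "\t};\n"
    "};"
)
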
diff --git a/src/antlion/controllers/ap_lib/radvd_config.py b/src/antlion/controllers/ap_lib/radvd_config.py
index f8e583e..647df82 100644
--- a/src/antlion/controllers/ap_lib/radvd_config.py
+++ b/src/antlion/controllers/ap_lib/radvd_config.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Any, List, Optional
+
 from antlion.controllers.ap_lib import radvd_constants
 
 import collections
@@ -23,43 +25,45 @@
     All the settings for a router advertisement daemon.
     """
 
-    def __init__(self,
-                 prefix=radvd_constants.DEFAULT_PREFIX,
-                 clients=[],
-                 route=None,
-                 rdnss=[],
-                 ignore_if_missing=None,
-                 adv_send_advert=radvd_constants.ADV_SEND_ADVERT_ON,
-                 unicast_only=None,
-                 max_rtr_adv_interval=None,
-                 min_rtr_adv_interval=None,
-                 min_delay_between_ras=None,
-                 adv_managed_flag=None,
-                 adv_other_config_flag=None,
-                 adv_link_mtu=None,
-                 adv_reachable_time=None,
-                 adv_retrans_timer=None,
-                 adv_cur_hop_limit=None,
-                 adv_default_lifetime=None,
-                 adv_default_preference=None,
-                 adv_source_ll_address=None,
-                 adv_home_agent_flag=None,
-                 adv_home_agent_info=None,
-                 home_agent_lifetime=None,
-                 home_agent_preference=None,
-                 adv_mob_rtr_support_flag=None,
-                 adv_interval_opt=None,
-                 adv_on_link=radvd_constants.ADV_ON_LINK_ON,
-                 adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
-                 adv_router_addr=None,
-                 adv_valid_lifetime=None,
-                 adv_preferred_lifetime=None,
-                 base_6to4_interface=None,
-                 adv_route_lifetime=None,
-                 adv_route_preference=None,
-                 adv_rdnss_preference=None,
-                 adv_rdnss_open=None,
-                 adv_rdnss_lifetime=None):
+    def __init__(
+        self,
+        prefix: str = radvd_constants.DEFAULT_PREFIX,
+        clients: List[str] = [],
+        route: Optional[Any] = None,
+        rdnss: List[str] = [],
+        ignore_if_missing: Optional[str] = None,
+        adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON,
+        unicast_only: Optional[str] = None,
+        max_rtr_adv_interval: Optional[int] = None,
+        min_rtr_adv_interval: Optional[int] = None,
+        min_delay_between_ras: Optional[int] = None,
+        adv_managed_flag: Optional[str] = None,
+        adv_other_config_flag: Optional[str] = None,
+        adv_link_mtu: Optional[int] = None,
+        adv_reachable_time: Optional[int] = None,
+        adv_retrans_timer: Optional[int] = None,
+        adv_cur_hop_limit: Optional[int] = None,
+        adv_default_lifetime: Optional[int] = None,
+        adv_default_preference: Optional[str] = None,
+        adv_source_ll_address: Optional[str] = None,
+        adv_home_agent_flag: Optional[str] = None,
+        adv_home_agent_info: Optional[str] = None,
+        home_agent_lifetime: Optional[int] = None,
+        home_agent_preference: Optional[int] = None,
+        adv_mob_rtr_support_flag: Optional[str] = None,
+        adv_interval_opt: Optional[str] = None,
+        adv_on_link: str = radvd_constants.ADV_ON_LINK_ON,
+        adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON,
+        adv_router_addr: Optional[str] = None,
+        adv_valid_lifetime: Optional[int] = None,
+        adv_preferred_lifetime: Optional[int] = None,
+        base_6to4_interface: Optional[str] = None,
+        adv_route_lifetime: Optional[int] = None,
+        adv_route_preference: Optional[str] = None,
+        adv_rdnss_preference: Optional[int] = None,
+        adv_rdnss_open: Optional[str] = None,
+        adv_rdnss_lifetime: Optional[int] = None,
+    ) -> None:
         """Construct a RadvdConfig.
 
         Args:
@@ -238,53 +242,73 @@
 
     def package_configs(self):
         conf = dict()
-        conf['prefix'] = self._prefix
-        conf['clients'] = self._clients
-        conf['route'] = self._route
-        conf['rdnss'] = self._rdnss
+        conf["prefix"] = self._prefix
+        conf["clients"] = self._clients
+        conf["route"] = self._route
+        conf["rdnss"] = self._rdnss
 
-        conf['interface_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('IgnoreIfMissing', self._ignore_if_missing),
-                    ('AdvSendAdvert', self._adv_send_advert),
-                    ('UnicastOnly', self._unicast_only),
-                    ('MaxRtrAdvInterval', self._max_rtr_adv_interval),
-                    ('MinRtrAdvInterval', self._min_rtr_adv_interval),
-                    ('MinDelayBetweenRAs', self._min_delay_between_ras),
-                    ('AdvManagedFlag', self._adv_managed_flag),
-                    ('AdvOtherConfigFlag', self._adv_other_config_flag),
-                    ('AdvLinkMTU', self._adv_link_mtu),
-                    ('AdvReachableTime', self._adv_reachable_time),
-                    ('AdvRetransTimer', self._adv_retrans_timer),
-                    ('AdvCurHopLimit', self._adv_cur_hop_limit),
-                    ('AdvDefaultLifetime', self._adv_default_lifetime),
-                    ('AdvDefaultPreference', self._adv_default_preference),
-                    ('AdvSourceLLAddress', self._adv_source_ll_address),
-                    ('AdvHomeAgentFlag', self._adv_home_agent_flag),
-                    ('AdvHomeAgentInfo', self._adv_home_agent_info),
-                    ('HomeAgentLifetime', self._home_agent_lifetime),
-                    ('HomeAgentPreference', self._home_agent_preference),
-                    ('AdvMobRtrSupportFlag', self._adv_mob_rtr_support_flag),
-                    ('AdvIntervalOpt', self._adv_interval_opt))))
+        conf["interface_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("IgnoreIfMissing", self._ignore_if_missing),
+                    ("AdvSendAdvert", self._adv_send_advert),
+                    ("UnicastOnly", self._unicast_only),
+                    ("MaxRtrAdvInterval", self._max_rtr_adv_interval),
+                    ("MinRtrAdvInterval", self._min_rtr_adv_interval),
+                    ("MinDelayBetweenRAs", self._min_delay_between_ras),
+                    ("AdvManagedFlag", self._adv_managed_flag),
+                    ("AdvOtherConfigFlag", self._adv_other_config_flag),
+                    ("AdvLinkMTU", self._adv_link_mtu),
+                    ("AdvReachableTime", self._adv_reachable_time),
+                    ("AdvRetransTimer", self._adv_retrans_timer),
+                    ("AdvCurHopLimit", self._adv_cur_hop_limit),
+                    ("AdvDefaultLifetime", self._adv_default_lifetime),
+                    ("AdvDefaultPreference", self._adv_default_preference),
+                    ("AdvSourceLLAddress", self._adv_source_ll_address),
+                    ("AdvHomeAgentFlag", self._adv_home_agent_flag),
+                    ("AdvHomeAgentInfo", self._adv_home_agent_info),
+                    ("HomeAgentLifetime", self._home_agent_lifetime),
+                    ("HomeAgentPreference", self._home_agent_preference),
+                    ("AdvMobRtrSupportFlag", self._adv_mob_rtr_support_flag),
+                    ("AdvIntervalOpt", self._adv_interval_opt),
+                ),
+            )
+        )
 
-        conf['prefix_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvOnLink', self._adv_on_link),
-                    ('AdvAutonomous', self._adv_autonomous),
-                    ('AdvRouterAddr', self._adv_router_addr),
-                    ('AdvValidLifetime', self._adv_valid_lifetime),
-                    ('AdvPreferredLifetime', self._adv_preferred_lifetime),
-                    ('Base6to4Interface', self._base_6to4_interface))))
+        conf["prefix_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvOnLink", self._adv_on_link),
+                    ("AdvAutonomous", self._adv_autonomous),
+                    ("AdvRouterAddr", self._adv_router_addr),
+                    ("AdvValidLifetime", self._adv_valid_lifetime),
+                    ("AdvPreferredLifetime", self._adv_preferred_lifetime),
+                    ("Base6to4Interface", self._base_6to4_interface),
+                ),
+            )
+        )
 
-        conf['route_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvRouteLifetime', self._adv_route_lifetime),
-                    ('AdvRoutePreference', self._adv_route_preference))))
+        conf["route_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvRouteLifetime", self._adv_route_lifetime),
+                    ("AdvRoutePreference", self._adv_route_preference),
+                ),
+            )
+        )
 
-        conf['rdnss_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvRDNSSPreference', self._adv_rdnss_preference),
-                    ('AdvRDNSSOpen', self._adv_rdnss_open),
-                    ('AdvRDNSSLifetime', self._adv_rdnss_lifetime))))
+        conf["rdnss_options"] = collections.OrderedDict(
+            filter(
+                lambda pair: pair[1] is not None,
+                (
+                    ("AdvRDNSSPreference", self._adv_rdnss_preference),
+                    ("AdvRDNSSOpen", self._adv_rdnss_open),
+                    ("AdvRDNSSLifetime", self._adv_rdnss_lifetime),
+                ),
+            )
+        )
 
         return conf
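
As a point of reference for the reformatted dictionaries above, a minimal sketch of what package_configs() returns for an all-defaults RadvdConfig; only options that are not None survive the filters.

from antlion.controllers.ap_lib.radvd_config import RadvdConfig

# Defaults: prefix fd00::/64, AdvSendAdvert/AdvOnLink/AdvAutonomous set to "on".
config = RadvdConfig()
conf = config.package_configs()

assert conf["prefix"] == "fd00::/64"
assert conf["interface_options"] == {"AdvSendAdvert": "on"}
assert conf["prefix_options"] == {"AdvOnLink": "on", "AdvAutonomous": "on"}
assert conf["route_options"] == {} and conf["rdnss_options"] == {}
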
diff --git a/src/antlion/controllers/ap_lib/radvd_constants.py b/src/antlion/controllers/ap_lib/radvd_constants.py
index 172a660..b02a694 100644
--- a/src/antlion/controllers/ap_lib/radvd_constants.py
+++ b/src/antlion/controllers/ap_lib/radvd_constants.py
@@ -14,53 +14,53 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-DEFAULT_PREFIX = 'fd00::/64'
+DEFAULT_PREFIX = "fd00::/64"
 
-IGNORE_IF_MISSING_ON = 'on'
-IGNORE_IF_MISSING_OFF = 'off'
+IGNORE_IF_MISSING_ON = "on"
+IGNORE_IF_MISSING_OFF = "off"
 
-ADV_SEND_ADVERT_ON = 'on'
-ADV_SEND_ADVERT_OFF = 'off'
+ADV_SEND_ADVERT_ON = "on"
+ADV_SEND_ADVERT_OFF = "off"
 
-UNICAST_ONLY_ON = 'on'
-UNICAST_ONLY_OFF = 'off'
+UNICAST_ONLY_ON = "on"
+UNICAST_ONLY_OFF = "off"
 
-ADV_MANAGED_FLAG_ON = 'on'
-ADV_MANAGED_FLAG_OFF = 'off'
+ADV_MANAGED_FLAG_ON = "on"
+ADV_MANAGED_FLAG_OFF = "off"
 
-ADV_OTHER_CONFIG_FLAG_ON = 'on'
-ADV_OTHER_CONFIG_FLAG_OFF = 'off'
+ADV_OTHER_CONFIG_FLAG_ON = "on"
+ADV_OTHER_CONFIG_FLAG_OFF = "off"
 
-ADV_DEFAULT_PREFERENCE_ON = 'on'
-ADV_DEFAULT_PREFERENCE_OFF = 'off'
+ADV_DEFAULT_PREFERENCE_ON = "on"
+ADV_DEFAULT_PREFERENCE_OFF = "off"
 
-ADV_SOURCE_LL_ADDRESS_ON = 'on'
-ADV_SOURCE_LL_ADDRESS_OFF = 'off'
+ADV_SOURCE_LL_ADDRESS_ON = "on"
+ADV_SOURCE_LL_ADDRESS_OFF = "off"
 
-ADV_HOME_AGENT_FLAG_ON = 'on'
-ADV_HOME_AGENT_FLAG_OFF = 'off'
+ADV_HOME_AGENT_FLAG_ON = "on"
+ADV_HOME_AGENT_FLAG_OFF = "off"
 
-ADV_HOME_AGENT_INFO_ON = 'on'
-ADV_HOME_AGENT_INFO_OFF = 'off'
+ADV_HOME_AGENT_INFO_ON = "on"
+ADV_HOME_AGENT_INFO_OFF = "off"
 
-ADV_MOB_RTR_SUPPORT_FLAG_ON = 'on'
-ADV_MOB_RTR_SUPPORT_FLAG_OFF = 'off'
+ADV_MOB_RTR_SUPPORT_FLAG_ON = "on"
+ADV_MOB_RTR_SUPPORT_FLAG_OFF = "off"
 
-ADV_INTERVAL_OPT_ON = 'on'
-ADV_INTERVAL_OPT_OFF = 'off'
+ADV_INTERVAL_OPT_ON = "on"
+ADV_INTERVAL_OPT_OFF = "off"
 
-ADV_ON_LINK_ON = 'on'
-ADV_ON_LINK_OFF = 'off'
+ADV_ON_LINK_ON = "on"
+ADV_ON_LINK_OFF = "off"
 
-ADV_AUTONOMOUS_ON = 'on'
-ADV_AUTONOMOUS_OFF = 'off'
+ADV_AUTONOMOUS_ON = "on"
+ADV_AUTONOMOUS_OFF = "off"
 
-ADV_ROUTER_ADDR_ON = 'on'
-ADV_ROUTER_ADDR_OFF = 'off'
+ADV_ROUTER_ADDR_ON = "on"
+ADV_ROUTER_ADDR_OFF = "off"
 
-ADV_ROUTE_PREFERENCE_LOW = 'low'
-ADV_ROUTE_PREFERENCE_MED = 'medium'
-ADV_ROUTE_PREFERENCE_HIGH = 'high'
+ADV_ROUTE_PREFERENCE_LOW = "low"
+ADV_ROUTE_PREFERENCE_MED = "medium"
+ADV_ROUTE_PREFERENCE_HIGH = "high"
 
-ADV_RDNSS_OPEN_ON = 'on'
-ADV_RDNSS_OPEN_OFF = 'off'
+ADV_RDNSS_OPEN_ON = "on"
+ADV_RDNSS_OPEN_OFF = "off"
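
These paired _ON/_OFF strings exist because radvd expects literal on/off values in its config file; a small illustrative sketch of overriding defaults through them (not part of the patch):

from antlion.controllers.ap_lib import radvd_constants
from antlion.controllers.ap_lib.radvd_config import RadvdConfig

# Turn on the managed (DHCPv6) flag and unicast-only behavior via the constants.
config = RadvdConfig(
    adv_managed_flag=radvd_constants.ADV_MANAGED_FLAG_ON,
    unicast_only=radvd_constants.UNICAST_ONLY_ON,
)
assert config.package_configs()["interface_options"]["AdvManagedFlag"] == "on"
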
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
index 78931e9..9e48935 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
@@ -19,10 +19,7 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def actiontec_pk5000(iface_wlan_2g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
+def actiontec_pk5000(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     """A simulated implementation of what a Actiontec PK5000 AP
     Args:
         iface_wlan_2g: The 2.4 interface of the test AP.
@@ -43,16 +40,15 @@
     if channel > 11:
         # Technically this should be 14 but since the PK5000 is a US only AP,
         # 11 is the highest allowable channel.
-        raise ValueError('The Actiontec PK5000 does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Actiontec PK5000 does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     interface = iface_wlan_2g
     short_preamble = False
@@ -62,7 +58,8 @@
     # Sets the basic rates and supported rates of the PK5000
     additional_params = utils.merge_dicts(
         hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -75,15 +72,13 @@
         beacon_interval=beacon_interval,
         dtim_period=dtim_period,
         short_preamble=short_preamble,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def actiontec_mi424wr(iface_wlan_2g=None,
-                      channel=None,
-                      security=None,
-                      ssid=None):
+def actiontec_mi424wr(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an Actiontec MI424WR AP.
     Args:
@@ -107,32 +102,32 @@
                 RIFS: Prohibited
     """
     if channel > 11:
-        raise ValueError('The Actiontec MI424WR does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Actiontec MI424WR does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_DATA_RATES,
-                              hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+    rates = utils.merge_dicts(
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+    )
     # Proprietary Atheros Communication: Adv Capability IE
     # Proprietary Atheros Communication: Unknown IE
     # Country Info: US Only IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd0a00037f04010000000000'
-        '0706555320010b1b'
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd0a00037f04010000000000"
+        "0706555320010b1b"
     }
 
     additional_params = utils.merge_dicts(rates, vendor_elements)
@@ -149,6 +144,7 @@
         dtim_period=1,
         short_preamble=True,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
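
For orientation, a hedged sketch of how these profile helpers are consumed; the interface name and SSID below are placeholder values, and handing the returned HostapdConfig to an access-point controller happens outside this file.

from antlion.controllers.ap_lib.third_party_ap_profiles import actiontec

# "wlan0" and "pk5000-test" are assumed placeholders; iface_wlan_2g must be a
# member of hostapd_constants.INTERFACE_2G_LIST for verify_interface() to pass.
config = actiontec.actiontec_pk5000(
    iface_wlan_2g="wlan0",
    channel=6,        # 2.4 GHz only; channels above 11 raise ValueError
    security=None,    # open network; WPA2 is the other accepted mode
    ssid="pk5000-test",
)
# config is a hostapd_config.HostapdConfig describing the simulated PK5000.
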
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
index 376d02c..ea25157 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def asus_rtac66u(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
+def asus_rtac66u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an Asus RTAC66U AP.
@@ -75,15 +73,11 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -94,15 +88,14 @@
         hostapd_constants.N_CAPABILITY_RX_STBC1,
         hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
-        hostapd_constants.N_CAPABILITY_SGI20
+        hostapd_constants.N_CAPABILITY_SGI20,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33'
-        'd7103c0001031049000600372a000120'
-        'dd090010180200001c0000'
+        "vendor_elements": "dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33"
+        "d7103c0001031049000600372a000120"
+        "dd090010180200001c0000"
     }
 
     # 2.4GHz
@@ -123,11 +116,12 @@
             hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -143,16 +137,15 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def asus_rtac86u(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
+def asus_rtac86u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     """A simulated implementation of an Asus RTAC86U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -185,19 +178,15 @@
                     RSN PTKSA Replay Counter Capab: 1
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     # 2.4GHz
     if channel <= 11:
@@ -206,24 +195,23 @@
         rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
         spectrum_mgmt = False
         # Measurement Pilot Transmission IE
-        vendor_elements = {'vendor_elements': '42020000'}
+        vendor_elements = {"vendor_elements": "42020000"}
 
     # 5GHz
     else:
         interface = iface_wlan_5g
         mode = hostapd_constants.MODE_11A
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        spectrum_mgmt = True,
+        spectrum_mgmt = True  # boolean flag, not a 1-tuple
         # Country Information IE (w/ individual channel info)
         # TPC Report Transmit Power IE
         # Measurement Pilot Transmission IE
         vendor_elements = {
-            'vendor_elements':
-            '074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e'
-            '68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e'
-            'a5011e'
-            '23021300'
-            '42020000'
+            "vendor_elements": "074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e"
+            "68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e"
+            "a5011e"
+            "23021300"
+            "42020000"
         }
 
     additional_params = utils.merge_dicts(rates, qbss, vendor_elements)
@@ -240,15 +228,14 @@
         dtim_period=3,
         short_preamble=False,
         spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def asus_rtac5300(iface_wlan_2g=None,
-                  iface_wlan_5g=None,
-                  channel=None,
-                  security=None,
-                  ssid=None):
+def asus_rtac5300(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an Asus RTAC5300 AP.
@@ -297,29 +284,25 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
     vht_channel_width = 20
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_SGI20
+        hostapd_constants.N_CAPABILITY_SGI20,
     ]
 
     # Broadcom IE
-    vendor_elements = {'vendor_elements': 'dd090010180200009c0000'}
+    vendor_elements = {"vendor_elements": "dd090010180200009c0000"}
 
     # 2.4GHz
     if channel <= 11:
@@ -328,9 +311,11 @@
         mode = hostapd_constants.MODE_11N_MIXED
         # AsusTek IE
         # Epigram 2.4GHz IE
-        vendor_elements['vendor_elements'] += 'dd25f832e4010101020100031411b5' \
-        '2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85' \
-        'dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002'
+        vendor_elements["vendor_elements"] += (
+            "dd25f832e4010101020100031411b5"
+            "2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85"
+            "dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002"
+        )
         ac_capabilities = None
 
     # 5GHz
@@ -339,18 +324,19 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         mode = hostapd_constants.MODE_11AC_MIXED
         # Epigram 5GHz IE
-        vendor_elements['vendor_elements'] += 'dd0500904c0410'
+        vendor_elements["vendor_elements"] += "dd0500904c0410"
         ac_capabilities = [
             hostapd_constants.AC_CAPABILITY_RXLDPC,
             hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
             hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, qbss, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -366,15 +352,14 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def asus_rtn56u(iface_wlan_2g=None,
-                iface_wlan_5g=None,
-                channel=None,
-                security=None,
-                ssid=None):
+def asus_rtn56u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     """A simulated implementation of an Asus RTN56U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -409,24 +394,20 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
 
     # 2.4GHz
@@ -438,13 +419,12 @@
         # AP Channel Report IEs (2)
         # WPS IE
         vendor_elements = {
-            'vendor_elements':
-            'dd07000c4307000000'
-            '0706555320010b14'
-            '33082001020304050607'
-            '33082105060708090a0b'
-            'dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c'
-            'd33448103c000101'
+            "vendor_elements": "dd07000c4307000000"
+            "0706555320010b14"
+            "33082001020304050607"
+            "33082105060708090a0b"
+            "dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c"
+            "d33448103c000101"
         }
 
     # 5GHz
@@ -453,13 +433,11 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         # Ralink Technology IE
         # US Country Code IE
-        vendor_elements = {
-            'vendor_elements': 'dd07000c4307000000'
-            '0706555320010b14'
-        }
+        vendor_elements = {"vendor_elements": "dd07000c4307000000" "0706555320010b14"}
 
-    additional_params = utils.merge_dicts(rates, vendor_elements, qbss,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, qbss, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -473,16 +451,15 @@
         dtim_period=1,
         short_preamble=False,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def asus_rtn66u(iface_wlan_2g=None,
-                iface_wlan_5g=None,
-                channel=None,
-                security=None,
-                ssid=None):
+def asus_rtn66u(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an Asus RTN66U AP.
     Args:
@@ -516,15 +493,11 @@
                     Simulated: MPDU Density 8
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -533,10 +506,10 @@
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
     ]
     # Broadcom IE
-    vendor_elements = {'vendor_elements': 'dd090010180200001c0000'}
+    vendor_elements = {"vendor_elements": "dd090010180200001c0000"}
 
     # 2.4GHz
     if channel <= 11:
@@ -549,8 +522,9 @@
         interface = iface_wlan_5g
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -564,6 +538,7 @@
         dtim_period=3,
         short_preamble=False,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
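
A recurring pattern in these reformatted profiles is a vendor_elements hex blob split across adjacent string literals; a small sketch (not from the patch) showing the reflow is value-preserving:

# Python joins adjacent string literals at parse time, so splitting a long
# vendor-element hex blob across lines leaves the value unchanged.
original = {"vendor_elements": "dd090010180200001c0000"}  # Broadcom IE
reflowed = {
    "vendor_elements": "dd0900101802"
    "00001c0000"
}
assert original == reflowed
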
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
index 957e214..9c5c99d 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
@@ -19,10 +19,7 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def belkin_f9k1001v5(iface_wlan_2g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
+def belkin_f9k1001v5(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of what a Belkin F9K1001v5 AP
     Args:
@@ -52,35 +49,34 @@
                 RSN PTKSA Replay Counter Capab: 16
     """
     if channel > 11:
-        raise ValueError('The Belkin F9k1001v5 does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Belkin F9k1001v5 does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
     rates = additional_params = utils.merge_dicts(
         hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     # Broadcom IE
     # WPS IE
     vendor_elements = {
-        'vendor_elements':
-        'dd090010180200100c0000'
-        'dd180050f204104a00011010440001021049000600372a000120'
+        "vendor_elements": "dd090010180200100c0000"
+        "dd180050f204104a00011010440001021049000600372a000120"
     }
 
     additional_params = utils.merge_dicts(rates, vendor_elements)
@@ -97,6 +93,7 @@
         dtim_period=3,
         short_preamble=False,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
index 64d76f6..8010837 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def linksys_ea4500(iface_wlan_2g=None,
-                   iface_wlan_5g=None,
-                   channel=None,
-                   security=None,
-                   ssid=None):
+def linksys_ea4500(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Linksys EA4500 AP
@@ -53,15 +51,11 @@
                 RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -71,17 +65,16 @@
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
     # Epigram HT Capabilities IE
     # Epigram HT Additional Capabilities IE
     # Marvell Semiconductor, Inc. IE
     vendor_elements = {
-        'vendor_elements':
-        'dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000'
-        'dd1a00904c3424000000000000000000000000000000000000000000'
-        'dd06005043030000'
+        "vendor_elements": "dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
     }
 
     # 2.4GHz
@@ -97,8 +90,9 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         obss_interval = None
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -113,16 +107,15 @@
         short_preamble=True,
         obss_interval=obss_interval,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
 
 
-def linksys_ea9500(iface_wlan_2g=None,
-                   iface_wlan_5g=None,
-                   channel=None,
-                   security=None,
-                   ssid=None):
+def linksys_ea9500(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     """A simulated implementation of what a Linksys EA9500 AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -148,21 +141,17 @@
                 RSN PTKSA Replay Counter Capab: 1
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
     # Measurement Pilot Transmission IE
-    vendor_elements = {'vendor_elements': '42020000'}
+    vendor_elements = {"vendor_elements": "42020000"}
 
     # 2.4GHz
     if channel <= 11:
@@ -189,15 +178,14 @@
         beacon_interval=100,
         dtim_period=1,
         short_preamble=False,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def linksys_wrt1900acv2(iface_wlan_2g=None,
-                        iface_wlan_5g=None,
-                        channel=None,
-                        security=None,
-                        ssid=None):
+def linksys_wrt1900acv2(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Linksys WRT1900ACV2 AP
     Args:
@@ -233,22 +221,18 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40
+        hostapd_constants.N_CAPABILITY_SGI40,
     ]
     ac_capabilities = [
         hostapd_constants.AC_CAPABILITY_RXLDPC,
@@ -256,17 +240,16 @@
         hostapd_constants.AC_CAPABILITY_RX_STBC_1,
         hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
         hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-        hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+        hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
     ]
     vht_channel_width = 20
     # Epigram, Inc. HT Capabilities IE
     # Epigram, Inc. HT Additional Capabilities IE
     # Marvell Semiconductor IE
     vendor_elements = {
-        'vendor_elements':
-        'dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800'
-        'dd1a00904c3424000000000000000000000000000000000000000000'
-        'dd06005043030000'
+        "vendor_elements": "dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
     }
 
     # 2.4GHz
@@ -282,15 +265,16 @@
         interface = iface_wlan_5g
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         obss_interval = None
-        spectrum_mgmt = True,
-        local_pwr_constraint = {'local_pwr_constraint': 3}
+        spectrum_mgmt = True  # boolean flag, not a 1-tuple
+        local_pwr_constraint = {"local_pwr_constraint": 3}
         # Country Information IE (w/ individual channel info)
-        vendor_elements['vendor_elements'] += '071e5553202401112801112c011130' \
-            '01119501179901179d0117a10117a50117'
+        vendor_elements["vendor_elements"] += (
+            "071e5553202401112801112c011130" "01119501179901179d0117a10117a50117"
+        )
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED,
-                                          local_pwr_constraint)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, local_pwr_constraint
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -308,5 +292,6 @@
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
         spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
index cf9bc93..25a91cd 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def netgear_r7000(iface_wlan_2g=None,
-                  iface_wlan_5g=None,
-                  channel=None,
-                  security=None,
-                  ssid=None):
+def netgear_r7000(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Netgear R7000 AP
@@ -79,15 +77,11 @@
                     AP Channel Report Capability: Disabled
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -104,14 +98,13 @@
     # Epigram, Inc. IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0600146c000000'
-        'dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7'
-        '69e103c0001031049000600372a000120'
-        'dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002'
-        'dd090010180200001c0000'
+        "vendor_elements": "dd0600146c000000"
+        "dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7"
+        "69e103c0001031049000600372a000120"
+        "dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002"
+        "dd090010180200001c0000"
     }
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     # 2.4GHz
     if channel <= 11:
@@ -142,14 +135,17 @@
             hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
     additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss,
+        rates,
+        vendor_elements,
+        qbss,
         hostapd_constants.ENABLE_RRM_BEACON_REPORT,
         hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
+        hostapd_constants.UAPSD_ENABLED,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -166,15 +162,14 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def netgear_wndr3400(iface_wlan_2g=None,
-                     iface_wlan_5g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
+def netgear_wndr3400(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS on 5GHz once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Netgear WNDR3400 AP
@@ -209,15 +204,11 @@
                     Simulated: Green Field not supported on Whirlwind.
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -226,15 +217,14 @@
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade'
-        'dc103c0001031049000600372a000120'
-        'dd090010180200f0040000'
+        "vendor_elements": "dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade"
+        "dc103c0001031049000600372a000120"
+        "dd090010180200f0040000"
     }
 
     # 2.4GHz
@@ -251,8 +241,9 @@
         obss_interval = None
         n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -267,6 +258,7 @@
         short_preamble=False,
         obss_interval=obss_interval,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
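
Because these dual-band profiles branch on the requested channel, the same helper covers both radios; a sketch with placeholder interface names (wlan0/wlan1 and the SSIDs are assumptions, not values from the patch):

from antlion.controllers.ap_lib.third_party_ap_profiles import netgear

# Channel 6 exercises the 2.4 GHz branch (iface_wlan_2g is used);
# channel 36 exercises the 5 GHz branch (iface_wlan_5g is used).
config_2g = netgear.netgear_r7000(
    iface_wlan_2g="wlan0",
    iface_wlan_5g="wlan1",
    channel=6,
    security=None,
    ssid="r7000-2g",
)
config_5g = netgear.netgear_r7000(
    iface_wlan_2g="wlan0",
    iface_wlan_5g="wlan1",
    channel=36,
    security=None,
    ssid="r7000-5g",
)
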
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
index b552b28..4a5bf68 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
@@ -19,8 +19,7 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def securifi_almond(iface_wlan_2g=None, channel=None, security=None,
-                    ssid=None):
+def securifi_almond(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     """A simulated implementation of a Securifi Almond AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
@@ -48,16 +47,15 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     if channel > 11:
-        raise ValueError('The Securifi Almond does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The Securifi Almond does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_HT40_PLUS,
@@ -65,24 +63,25 @@
         hostapd_constants.N_CAPABILITY_SGI40,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-                              hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+    rates = utils.merge_dicts(
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     # Ralink Technology IE
     # Country Information IE
     # AP Channel Report IEs
     vendor_elements = {
-        'vendor_elements':
-        'dd07000c4307000000'
-        '0706555320010b14'
-        '33082001020304050607'
-        '33082105060708090a0b'
+        "vendor_elements": "dd07000c4307000000"
+        "0706555320010b14"
+        "33082001020304050607"
+        "33082105060708090a0b"
     }
 
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     additional_params = utils.merge_dicts(rates, vendor_elements, qbss)
 
@@ -99,6 +98,7 @@
         short_preamble=True,
         obss_interval=300,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
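
For context, each of these third-party profiles is a plain function returning a HostapdConfig. A hypothetical call for the Securifi profile above (the interface name and SSID are made up; a hostapd security object would be passed for WPA2) might look like:

    from antlion.controllers.ap_lib.third_party_ap_profiles import securifi

    # Hypothetical arguments; real interface names come from the testbed config.
    config = securifi.securifi_almond(
        iface_wlan_2g="wlan0",
        channel=6,
        security=None,  # open network; pass a WPA2 security object otherwise
        ssid="almond_test",
    )
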
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
index 8911e3e..81eeeec 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
+++ b/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
@@ -19,11 +19,9 @@
 from antlion.controllers.ap_lib import hostapd_utils
 
 
-def tplink_archerc5(iface_wlan_2g=None,
-                    iface_wlan_5g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
+def tplink_archerc5(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an TPLink ArcherC5 AP.
     Args:
@@ -73,15 +71,11 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -90,17 +84,16 @@
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8'
-        '12103c0001031049000600372a000120'
-        'dd090010180200001c0000'
+        "vendor_elements": "dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8"
+        "12103c0001031049000600372a000120"
+        "dd090010180200001c0000"
     }
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
     # 2.4GHz
     if channel <= 11:
@@ -128,10 +121,13 @@
         ]
 
     additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss,
+        rates,
+        vendor_elements,
+        qbss,
         hostapd_constants.ENABLE_RRM_BEACON_REPORT,
         hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
+        hostapd_constants.UAPSD_ENABLED,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -147,15 +143,14 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def tplink_archerc7(iface_wlan_2g=None,
-                    iface_wlan_5g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
+def tplink_archerc7(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an TPLink ArcherC7 AP.
     Args:
@@ -183,15 +178,11 @@
                     RSN PTKSA Replay Counter Capab: 16
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -200,14 +191,13 @@
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
     # Atheros IE
     # WPS IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd180050f204104a00011010440001021049000600372a000120'
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd180050f204104a00011010440001021049000600372a000120"
     }
 
     # 2.4GHz
@@ -229,14 +219,15 @@
         mode = hostapd_constants.MODE_11AC_MIXED
         spectrum_mgmt = True
         # Country Information IE (w/ individual channel info)
-        vendor_elements['vendor_elements'] += (
-            '074255532024011e28011e2c011e30'
-            '011e3401173801173c01174001176401176801176c0117700117740117840117'
-            '8801178c011795011e99011e9d011ea1011ea5011e')
-        pwr_constraint = {'local_pwr_constraint': 3}
+        vendor_elements["vendor_elements"] += (
+            "074255532024011e28011e2c011e30"
+            "011e3401173801173c01174001176401176801176c0117700117740117840117"
+            "8801178c011795011e99011e9d011ea1011ea5011e"
+        )
+        pwr_constraint = {"local_pwr_constraint": 3}
         n_capabilities += [
             hostapd_constants.N_CAPABILITY_SGI40,
-            hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+            hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
         ]
 
         if hostapd_config.ht40_plus_allowed(channel):
@@ -252,12 +243,12 @@
             hostapd_constants.AC_CAPABILITY_RX_STBC_1,
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
             hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-            hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
+            hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
         ]
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED,
-                                          pwr_constraint)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, pwr_constraint
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -274,15 +265,14 @@
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
         spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def tplink_c1200(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
+def tplink_c1200(
+    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
+):
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an TPLink C1200 AP.
@@ -320,15 +310,11 @@
                 Simulated: RIFS Prohibited
     """
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
@@ -337,15 +323,14 @@
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
     ]
     # WPS IE
     # Broadcom IE
     vendor_elements = {
-        'vendor_elements':
-        'dd350050f204104a000110104400010210470010000000000000000000000000000000'
-        '00103c0001031049000a00372a00012005022688'
-        'dd090010180200000c0000'
+        "vendor_elements": "dd350050f204104a000110104400010210470010000000000000000000000000000000"
+        "00103c0001031049000a00372a00012005022688"
+        "dd090010180200000c0000"
     }
 
     # 2.4GHz
@@ -373,9 +358,12 @@
         ]
 
     additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
+        rates,
+        vendor_elements,
+        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
         hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
+        hostapd_constants.UAPSD_ENABLED,
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -391,14 +379,12 @@
         n_capabilities=n_capabilities,
         ac_capabilities=ac_capabilities,
         vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
     return config
 
 
-def tplink_tlwr940n(iface_wlan_2g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
+def tplink_tlwr940n(iface_wlan_2g=None, channel=None, security=None, ssid=None):
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an TPLink TLWR940N AP.
     Args:
@@ -419,37 +405,38 @@
                 RSN PTKSA Replay Counter Capab: 16
     """
     if channel > 11:
-        raise ValueError('The mock TP-Link TLWR940N does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
+        raise ValueError(
+            "The mock TP-Link TLWR940N does not support 5Ghz. "
+            "Invalid channel (%s)" % channel
+        )
     # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
     if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_SGI20,
         hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
 
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-                              hostapd_constants.CCK_AND_OFDM_DATA_RATES)
+    rates = utils.merge_dicts(
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    )
 
     # Atheros Communications, Inc. IE
     # WPS IE
     vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd260050f204104a0001101044000102104900140024e2600200010160000002000160'
-        '0100020001'
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd260050f204104a0001101044000102104900140024e2600200010160000002000160"
+        "0100020001"
     }
 
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
+    additional_params = utils.merge_dicts(
+        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
+    )
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -463,6 +450,7 @@
         dtim_period=1,
         short_preamble=True,
         n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
+        additional_parameters=additional_params,
+    )
 
     return config
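
The vendor_elements values in these profiles are concatenated IEEE 802.11 information elements encoded as hex: a one-byte element ID (0xdd for vendor-specific), a one-byte payload length, then the payload. A self-contained sketch, independent of antlion, that splits such a string back into its elements:

    def split_information_elements(hex_blob: str):
        """Split a hostapd vendor_elements hex string into (id, payload) pairs."""
        raw = bytes.fromhex(hex_blob)
        elements = []
        i = 0
        while i + 2 <= len(raw):
            element_id, length = raw[i], raw[i + 1]
            elements.append((element_id, raw[i + 2 : i + 2 + length]))
            i += 2 + length
        return elements

    # The Atheros + WPS IEs used by the ArcherC7 profile above.
    for eid, payload in split_information_elements(
        "dd0900037f01010000ff7f"
        "dd180050f204104a00011010440001021049000600372a000120"
    ):
        print(hex(eid), payload.hex())
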
diff --git a/src/antlion/controllers/ap_lib/wireless_network_management.py b/src/antlion/controllers/ap_lib/wireless_network_management.py
index ecd5b3b..62ba34e 100644
--- a/src/antlion/controllers/ap_lib/wireless_network_management.py
+++ b/src/antlion/controllers/ap_lib/wireless_network_management.py
@@ -18,8 +18,9 @@
 
 from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement
 
-BssTransitionCandidateList = NewType('BssTransitionCandidateList',
-                                     List[NeighborReportElement])
+BssTransitionCandidateList = NewType(
+    "BssTransitionCandidateList", List[NeighborReportElement]
+)
 
 
 class BssTerminationDuration:
@@ -50,16 +51,17 @@
     """
 
     def __init__(
-            self,
-            preferred_candidate_list_included: bool = False,
-            abridged: bool = False,
-            disassociation_imminent: bool = False,
-            ess_disassociation_imminent: bool = False,
-            disassociation_timer: int = 0,
-            validity_interval: int = 1,
-            bss_termination_duration: Optional[BssTerminationDuration] = None,
-            session_information_url: Optional[str] = None,
-            candidate_list: Optional[BssTransitionCandidateList] = None):
+        self,
+        preferred_candidate_list_included: bool = False,
+        abridged: bool = False,
+        disassociation_imminent: bool = False,
+        ess_disassociation_imminent: bool = False,
+        disassociation_timer: int = 0,
+        validity_interval: int = 1,
+        bss_termination_duration: Optional[BssTerminationDuration] = None,
+        session_information_url: Optional[str] = None,
+        candidate_list: Optional[BssTransitionCandidateList] = None,
+    ):
         """Create a BSS Transition Management request.
 
         Args:
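
BssTransitionCandidateList above is a typing.NewType alias: it gives the candidate list a distinct name for static type checkers without creating a runtime wrapper class. The pattern in isolation (the names below are hypothetical, not antlion APIs):

    from typing import NewType

    UserId = NewType("UserId", int)

    def describe(user_id: UserId) -> str:
        # At runtime UserId(3) is just the int 3; the distinction only exists
        # for static checkers such as mypy.
        return f"user {user_id}"

    print(describe(UserId(3)))
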
diff --git a/src/antlion/controllers/arduino_wifi_dongle.py b/src/antlion/controllers/arduino_wifi_dongle.py
deleted file mode 100644
index 18f57e9..0000000
--- a/src/antlion/controllers/arduino_wifi_dongle.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import re
-import subprocess
-import threading
-import time
-from datetime import datetime
-
-from serial import Serial
-
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'ArduinoWifiDongle'
-ACTS_CONTROLLER_REFERENCE_NAME = 'arduino_wifi_dongles'
-
-WIFI_DONGLE_EMPTY_CONFIG_MSG = 'Configuration is empty, abort!'
-WIFI_DONGLE_NOT_LIST_CONFIG_MSG = 'Configuration should be a list, abort!'
-
-DEV = '/dev/'
-IP = 'IP: '
-STATUS = 'STATUS: '
-SSID = 'SSID: '
-RSSI = 'RSSI: '
-PING = 'PING: '
-SCAN_BEGIN = 'Scan Begin'
-SCAN_END = 'Scan End'
-READ_TIMEOUT = 10
-BAUD_RATE = 9600
-TMP_DIR = 'tmp/'
-SSID_KEY = 'SSID'
-PWD_KEY = 'password'
-
-
-class ArduinoWifiDongleError(signals.ControllerError):
-    pass
-
-
-def create(configs):
-    """Creates ArduinoWifiDongle objects.
-
-    Args:
-        configs: A list of dicts or a list of serial numbers, each representing
-                 a configuration of a arduino wifi dongle.
-
-    Returns:
-        A list of Wifi dongle objects.
-    """
-    if not configs:
-        raise ArduinoWifiDongleError(WIFI_DONGLE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise ArduinoWifiDongleError(WIFI_DONGLE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of serials.
-        return get_instances(configs)
-    else:
-        # Configs is a list of dicts.
-        return get_instances_with_configs(configs)
-
-
-def destroy(wcs):
-    for wc in wcs:
-        wc.clean_up()
-
-
-def get_instances(configs):
-    wcs = []
-    for s in configs:
-        wcs.append(ArduinoWifiDongle(s))
-    return wcs
-
-
-def get_instances_with_configs(configs):
-    wcs = []
-    for c in configs:
-        try:
-            s = c.pop('serial')
-        except KeyError:
-            raise ArduinoWifiDongleError(
-                '"serial" is missing for ArduinoWifiDongle config %s.' % c)
-        wcs.append(ArduinoWifiDongle(s))
-    return wcs
-
-
-class ArduinoWifiDongle(object):
-    """Class representing an arduino wifi dongle.
-
-    Each object of this class represents one wifi dongle in ACTS.
-
-    Attribtues:
-        serial: Short serial number of the wifi dongle in string.
-        port: The terminal port the dongle is connected to in string.
-        log: A logger adapted from root logger with added token specific to an
-             ArduinoWifiDongle instance.
-        log_file_fd: File handle of the log file.
-        set_logging: Logging for the dongle is enabled when this param is set
-        lock: Lock to acquire and release set_logging variable
-        ssid: SSID of the wifi network the dongle is connected to.
-        ip_addr: IP address on the wifi interface.
-        scan_results: Most recent scan results.
-        ping: Ping status in bool - ping to www.google.com
-    """
-
-    def __init__(self, serial):
-        """Initializes the ArduinoWifiDongle object.
-
-        Args:
-            serial: The serial number for the wifi dongle.
-        """
-        if not serial:
-            raise ArduinoWifiDongleError(
-                'The ArduinoWifiDongle serial number must not be empty.')
-        self.serial = serial
-        self.port = self._get_serial_port()
-        self.log = logger.create_tagged_trace_logger(
-            'ArduinoWifiDongle|%s' % self.serial)
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_file_path = os.path.join(
-            log_path_base, 'ArduinoWifiDongle_%s_serial_log.txt' % self.serial)
-        self.log_file_fd = open(self.log_file_path, 'a')
-
-        self.set_logging = True
-        self.lock = threading.Lock()
-        self.start_controller_log()
-
-        self.ssid = None
-        self.ip_addr = None
-        self.status = 0
-        self.scan_results = []
-        self.scanning = False
-        self.ping = False
-
-        os.makedirs(TMP_DIR, exist_ok=True)
-
-    def clean_up(self):
-        """Cleans up the controller and releases any resources it claimed."""
-        self.stop_controller_log()
-        self.log_file_fd.close()
-
-    def _get_serial_port(self):
-        """Get the serial port for a given ArduinoWifiDongle serial number.
-
-        Returns:
-            Serial port in string if the dongle is attached.
-        """
-        cmd = 'ls %s' % DEV
-        serial_ports = utils.exe_cmd(cmd).decode('utf-8', 'ignore').split('\n')
-        for port in serial_ports:
-            if 'USB' not in port:
-                continue
-            tty_port = '%s%s' % (DEV, port)
-            cmd = 'udevadm info %s' % tty_port
-            udev_output = utils.exe_cmd(cmd).decode('utf-8', 'ignore')
-            result = re.search('ID_SERIAL_SHORT=(.*)\n', udev_output)
-            if self.serial == result.group(1):
-                logging.info('Found wifi dongle %s at serial port %s' %
-                             (self.serial, tty_port))
-                return tty_port
-        raise ArduinoWifiDongleError('Wifi dongle %s is specified in config'
-                                     ' but is not attached.' % self.serial)
-
-    def write(self, arduino, file_path, network=None):
-        """Write an ino file to the arduino wifi dongle.
-
-        Args:
-            arduino: path of the arduino executable.
-            file_path: path of the ino file to flash onto the dongle.
-            network: wifi network to connect to.
-
-        Returns:
-            True: if the write is sucessful.
-            False: if not.
-        """
-        return_result = True
-        self.stop_controller_log('Flashing %s\n' % file_path)
-        cmd = arduino + file_path + ' --upload --port ' + self.port
-        if network:
-            cmd = self._update_ino_wifi_network(arduino, file_path, network)
-        self.log.info('Command is %s' % cmd)
-        proc = subprocess.Popen(cmd,
-                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                                shell=True)
-        _, _ = proc.communicate()
-        return_code = proc.returncode
-        if return_code != 0:
-            self.log.error('Failed to write file %s' % return_code)
-            return_result = False
-        self.start_controller_log('Flashing complete\n')
-        return return_result
-
-    def _update_ino_wifi_network(self, arduino, file_path, network):
-        """Update wifi network in the ino file.
-
-        Args:
-            arduino: path of the arduino executable.
-            file_path: path of the ino file to flash onto the dongle
-            network: wifi network to update the ino file with
-
-        Returns:
-            cmd: arduino command to run to flash the .ino file
-        """
-        tmp_file = '%s%s' % (TMP_DIR, file_path.split('/')[-1])
-        utils.exe_cmd('cp %s %s' % (file_path, tmp_file))
-        ssid = network[SSID_KEY]
-        pwd = network[PWD_KEY]
-        sed_cmd = 'sed -i \'s/"wifi_tethering_test"/"%s"/\' %s' % (
-            ssid, tmp_file)
-        utils.exe_cmd(sed_cmd)
-        sed_cmd = 'sed -i  \'s/"password"/"%s"/\' %s' % (pwd, tmp_file)
-        utils.exe_cmd(sed_cmd)
-        cmd = "%s %s --upload --port %s" % (arduino, tmp_file, self.port)
-        return cmd
-
-    def start_controller_log(self, msg=None):
-        """Reads the serial port and writes the data to ACTS log file.
-
-        This method depends on the logging enabled in the .ino files. The logs
-        are read from the serial port and are written to the ACTS log after
-        adding a timestamp to the data.
-
-        Args:
-            msg: Optional param to write to the log file.
-        """
-        if msg:
-            curr_time = str(datetime.now())
-            self.log_file_fd.write(curr_time + ' INFO: ' + msg)
-        t = threading.Thread(target=self._start_log)
-        t.daemon = True
-        t.start()
-
-    def stop_controller_log(self, msg=None):
-        """Stop the controller log.
-
-        Args:
-            msg: Optional param to write to the log file.
-        """
-        with self.lock:
-            self.set_logging = False
-        if msg:
-            curr_time = str(datetime.now())
-            self.log_file_fd.write(curr_time + ' INFO: ' + msg)
-
-    def _start_log(self):
-        """Target method called by start_controller_log().
-
-        This method is called as a daemon thread, which continuously reads the
-        serial port. Stops when set_logging is set to False or when the test
-        ends.
-        """
-        self.set_logging = True
-        ser = Serial(self.port, BAUD_RATE)
-        while True:
-            curr_time = str(datetime.now())
-            data = ser.readline().decode('utf-8', 'ignore')
-            self._set_vars(data)
-            with self.lock:
-                if not self.set_logging:
-                    break
-            self.log_file_fd.write(curr_time + " " + data)
-
-    def _set_vars(self, data):
-        """Sets the variables by reading from the serial port.
-
-        Wifi dongle data such as wifi status, ip address, scan results
-        are read from the serial port and saved inside the class.
-
-        Args:
-            data: New line from the serial port.
-        """
-        # 'data' represents each line retrieved from the device's serial port.
-        # since we depend on the serial port logs to get the attributes of the
-        # dongle, every line has the format of {ino_file: method: param: value}.
-        # We look for the attribute in the log and retrieve its value.
-        # Ex: data = "connect_wifi: loop(): STATUS: 3" then val = "3"
-        # Similarly, we check when the scan has begun and ended and get all the
-        # scan results in between.
-        if data.count(':') != 3:
-            return
-        val = data.split(':')[-1].lstrip().rstrip()
-        if SCAN_BEGIN in data:
-            self.scan_results = []
-            self.scanning = True
-        elif SCAN_END in data:
-            self.scanning = False
-        elif self.scanning:
-            self.scan_results.append(data)
-        elif IP in data:
-            self.ip_addr = None if val == '0.0.0.0' else val
-        elif SSID in data:
-            self.ssid = val
-        elif STATUS in data:
-            self.status = int(val)
-        elif PING in data:
-            self.ping = int(val) != 0
-
-    def ip_address(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get the ip address of the wifi dongle.
-
-        Args:
-            exp_result: True if IP address is expected (wifi connected).
-            timeout: Optional param that specifies the wait time for the IP
-                     address to come up on the dongle.
-
-        Returns:
-            IP: addr in string, if wifi connected.
-                None if not connected.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.ip_addr) or (
-                    not exp_result and not self.ip_addr):
-                break
-            time.sleep(1)
-        return self.ip_addr
-
-    def wifi_status(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get wifi status on the dongle.
-
-        Returns:
-            True: if wifi is connected.
-            False: if not connected.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.status == 3) or (
-                    not exp_result and not self.status):
-                break
-            time.sleep(1)
-        return self.status == 3
-
-    def wifi_scan(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get the wifi scan results.
-
-        Args:
-            exp_result: True if scan results are expected.
-            timeout: Optional param that specifies the wait time for the scan
-                     results to come up on the dongle.
-
-        Returns:
-            list of dictionaries each with SSID and RSSI of the network
-            found in the scan.
-        """
-        scan_networks = []
-        d = {}
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.scan_results) or (
-                    not exp_result and not self.scan_results):
-                break
-            time.sleep(1)
-        for i in range(len(self.scan_results)):
-            if SSID in self.scan_results[i]:
-                d.clear()
-                d[SSID] = self.scan_results[i].split(':')[-1].rstrip()
-            elif RSSI in self.scan_results[i]:
-                d[RSSI] = self.scan_results[i].split(':')[-1].rstrip()
-                scan_networks.append(d)
-
-        return scan_networks
-
-    def ping_status(self, exp_result=True, timeout=READ_TIMEOUT):
-        """ Get ping status on the dongle.
-
-        Returns:
-            True: if ping is successful
-            False: if not successful
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.ping) or (not exp_result and not self.ping):
-                break
-            time.sleep(1)
-        return self.ping
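
The controller deleted above stopped its serial-logging daemon thread by flipping a boolean under a lock. Purely as a reference sketch (not antlion code), the same start/stop pattern is commonly expressed with threading.Event:

    import threading

    class LineLogger:
        """Sketch only: run a reader callable on a daemon thread until stopped."""

        def __init__(self, read_line):
            self._read_line = read_line  # e.g. a wrapper around Serial.readline
            self._stop = threading.Event()
            self._thread = None

        def start(self):
            self._stop.clear()
            self._thread = threading.Thread(target=self._run, daemon=True)
            self._thread.start()

        def stop(self, timeout=5):
            self._stop.set()
            if self._thread:
                self._thread.join(timeout=timeout)

        def _run(self):
            while not self._stop.is_set():
                line = self._read_line()
                if line:
                    print(line)
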
diff --git a/src/antlion/controllers/asus_axe11000_ap.py b/src/antlion/controllers/asus_axe11000_ap.py
deleted file mode 100644
index d4372ac..0000000
--- a/src/antlion/controllers/asus_axe11000_ap.py
+++ /dev/null
@@ -1,763 +0,0 @@
-"""Controller for Asus AXE11000 access point."""
-
-import time
-from antlion import logger
-from selenium import webdriver
-from selenium.common.exceptions import NoSuchElementException
-from selenium.webdriver.chrome.options import Options
-from selenium.webdriver.support.ui import Select
-
-MOBLY_CONTROLLER_CONFIG_NAME = "AsusAXE11000AP"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-
-# Access point UI parameters
-USERNAME = "login_username"
-PASSWORD = "login_passwd"
-SIGN_IN_ID = "button"
-APPLY_BUTTON = "apply_btn"
-APPLY_BUTTON_ID = "applyButton"
-WIRELESS_SETTINGS = "Advanced_Wireless_Content_menu"
-GENERAL_TAB = "Advanced_Wireless_Content_tab"
-PROFESSIONAL_TAB = "Advanced_WAdvanced_Content_tab"
-HE_MODE_ID = "he_mode_field"
-WL_UNIT = "wl_unit"
-WL_11AX = "wl_11ax"
-WL_RADIO = "wl_radio"
-WL_CLOSED = "wl_closed"
-RADIO = "radio"
-BAND_2G_CHANNEL = "band0_channel"
-BAND_5G_CHANNEL = "band1_channel"
-BAND_6G_CHANNEL = "band2_channel"
-BAND_2G_AUTH = "band0_auth_mode_x"
-BAND_5G_AUTH = "band1_auth_mode_x"
-BAND_6G_AUTH = "band2_auth_mode_x"
-BAND_2G_SSID = "band0_ssid"
-BAND_5G_SSID = "band1_ssid"
-BAND_6G_SSID = "band2_ssid"
-BAND_2G_PSK = "band0_wpa_psk"
-BAND_5G_PSK = "band1_wpa_psk"
-BAND_6G_PSK = "band2_wpa_psk"
-BAND_2G_RAD_IP = "band0_radius_ipaddr"
-BAND_5G_RAD_IP = "band1_radius_ipaddr"
-BAND_2G_RAD_PORT = "band0_radius_port"
-BAND_5G_RAD_PORT = "band1_radius_port"
-BAND_2G_RAD_KEY = "band0_radius_key"
-BAND_5G_RAD_KEY = "band1_radius_key"
-SMART_CONNECT = "smartcon_enable_field"
-BROWSER_WAIT_SHORT_TIMEOUT = 6
-BROWSER_WAIT_TIMEOUT = 15
-BROWSER_WAIT_LONG_TIMEOUT = 90
-BROWSER_WAIT_VERY_LONG_TIMEOUT = 180
-
-# Access point supported modes, channels
-VALID_BANDS = ["2g", "5g", "6g"]
-WL_BAND_VALUE = {"2g": "0", "5g": "1", "6g": "2"}
-CHANNELS_2G = {
-    0: "0",
-    1: "1",
-    2: "2",
-    3: "3",
-    4: "4",
-    5: "5",
-    6: "6",
-    7: "7",
-    8: "8",
-    9: "9",
-    10: "10",
-    11: "11"
-}
-CHANNELS_5G = {
-    0: "0",
-    36: "36/160",
-    40: "40/160",
-    44: "44/160",
-    48: "48/160",
-    52: "52/160",
-    56: "56/160",
-    60: "60/160",
-    64: "64/160",
-    100: "100/160",
-    104: "104/160",
-    108: "108/160",
-    112: "112/160",
-    116: "116/160",
-    120: "120/160",
-    124: "124/160",
-    128: "128/160",
-    132: "132/80",
-    136: "136/80",
-    140: "140/80",
-    144: "144/80",
-    149: "149/80",
-    153: "153/80",
-    157: "157/80",
-    161: "161/80",
-    165: "165"
-}
-CHANNELS_6G = {
-    0: "0",
-    37: "6g37/160",
-    53: "6g53/160",
-    69: "6g69/160",
-    85: "6g85/160",
-    101: "6g101/160",
-    117: "6g117/160",
-    133: "6g133/160",
-    149: "6g149/160",
-    165: "6g165/160",
-    181: "6g181/160",
-    197: "6g197/160",
-    213: "6g213/160"
-}
-
-
-def create(configs):
-  """Creates ap controllers from a json config."""
-  return [AsusAXE11000AP(c) for c in configs]
-
-
-def destroy(aps):
-  """Destroys a list of ap controllers."""
-  for ap in aps:
-    ap.reset_to_default_ap_settings()
-    ap.driver.quit()
-
-
-class AsusAXE11000AP(object):
-  """Asus AXE11000 AccessPoint controller.
-
-  Controller class for Asus AXE11000 6GHz AP. This class provides methods to
-  configure the AP with different settings required for 11ax and 6GHz testing.
-  The controller uses chrome webdriver to communicate with the AP.
-
-  The controller object is initiated in the test class. The ACTS test runner
-  calls this controller using the 'AsusAXE11000AP' keyword in the ACTS config
-  file. The AP is reset to default settings and this is handled during the
-  test teardown.
-
-  Attributes:
-    ip: IP address to reach the AP.
-    port: Port numnber to reach the AP.
-    protocol: Protcol to reach the AP (http/https).
-    username: Username to login to the AP.
-    password: Password to login to the AP.
-    config_page: web url to login to the AP.
-    ap_settings: AP settings configured at any given point.
-    default_ap_settings: Default AP settings before running the tests.
-    driver: chrome webdriver object to update the settings.
-  """
-
-  def __init__(self, config):
-    """Initialize AP.
-
-    Creates a chrome webdriver object based on the router parameters.
-    The webdriver will login to the router and goes to the wireless settings
-    page. This object will be used to change the router settings required for
-    the test cases. Required parameters are <ip_address>, <port>, <protocol>,
-    <admin_username> and <admin_password>.
-
-    Url: <procotol>://<ip_address>:<port>/Main_Login.asp
-    Login: <admin_username>/<admin_password>
-
-    Args:
-      config: dict, dictionary of router parameters required for webdriver.
-    """
-    self.ip = config["ip_address"]
-    self.port = config["port"]
-    self.protocol = config["protocol"]
-    self.username = config["admin_username"]
-    self.password = config["admin_password"]
-    lambda_msg = lambda msg: "[AsusAXE11000AP|%s] %s" % (self.ip, msg)
-    self.log = logger.create_logger(lambda_msg)
-    self.ap_settings = {"2g": {}, "5g": {}, "6g": {},}
-    self.config_page = (
-        "{protocol}://{ip_address}:{port}/Main_Login.asp").format(
-            protocol=self.protocol, ip_address=self.ip, port=self.port)
-    self.chrome_options = Options()
-    self.chrome_options.add_argument("--headless")
-    self.chrome_options.add_argument("--no-sandbox")
-    self.driver = webdriver.Chrome(options=self.chrome_options)
-    self.driver.implicitly_wait(BROWSER_WAIT_TIMEOUT*2)
-    self.driver.get(self.config_page)
-    self.driver.find_element_by_name(USERNAME).send_keys(self.username)
-    self.driver.find_element_by_name(PASSWORD).send_keys(self.password)
-    self.driver.find_element_by_id(SIGN_IN_ID).click()
-    self._wait_for_web_element(self.driver.find_element_by_id,
-                               WIRELESS_SETTINGS)
-    self.driver.find_element_by_id(WIRELESS_SETTINGS).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-    self._update_ap_settings()
-    self.default_ap_settings = self.ap_settings.copy()
-
-  ### Helper methods ###
-
-  def _wait_for_web_element(self,
-                            find_element,
-                            element,
-                            attribute=None,
-                            value=None):
-    """Verifies click actions/selections work.
-
-    Args:
-      find_element: func(), webdriver method to call
-      element: str, web element to look for. Ex: id, class, name
-      attribute: str, attribute to get from a webelement
-      value: str, verify attribute is set to the correct value
-
-    Raises:
-      ValueError: An error occurred if expected attribute not found.
-    """
-    curr_time = time.time()
-    while time.time() < curr_time + BROWSER_WAIT_TIMEOUT*4:
-      time.sleep(2)
-      try:
-        x = find_element(element)
-        if attribute and str(value) not in x.get_attribute(attribute):
-          raise ValueError("Attribute is not set to the right value")
-        return
-      except NoSuchElementException:
-        pass
-    raise ValueError("Failed to find web element: %s" % element)
-
-  def _update_ap_settings_2g_band(self):
-    """Read settings configured on 2g band.
-
-    Parameters Updated:
-      security type: open, wpa2-psk, wpa3-sae or wpa2-ent.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if psk or sae network).
-      radius server ip: Radius server IP addr (if ent network).
-      radius server port: Radius server Port number (if ent network).
-      radius server secret: Radius server secret (if ent network).
-      channel: 2G band channel.
-    """
-    dict_2g = {}
-    dict_2g["security"] = self.driver.find_element_by_name(
-        BAND_2G_AUTH).get_attribute("value")
-    dict_2g["SSID"] = self.driver.find_element_by_name(
-        BAND_2G_SSID).get_attribute("value")
-    if dict_2g["security"] == "psk2" or dict_2g["security"] == "sae":
-      dict_2g["password"] = self.driver.find_element_by_name(
-          BAND_2G_PSK).get_attribute("value")
-    elif dict_2g["security"] == "wpa2":
-      dict_2g["radius_ip_addr"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_IP).get_attribute("value")
-      dict_2g["radius_port"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_PORT).get_attribute("value")
-      dict_2g["radius_secret"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_KEY).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("2g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_2G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["2g"] = dict_2g.copy()
-    self.ap_settings["2g"]["channel"] = channel
-
-  def _update_ap_settings_5g_band(self):
-    """Read settings configured on 5g band.
-
-    Parameters Updated:
-      security type: open, wpa2-psk, wpa3-sae or wpa2-ent.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if psk or sae network).
-      radius server ip: Radius server IP addr (if ent network).
-      radius server port: Radius server Port number (if ent network).
-      radius server secret: Radius server secret (if ent network).
-      channel: 5G band channel.
-    """
-    dict_5g = {}
-    dict_5g["security"] = self.driver.find_element_by_name(
-        BAND_5G_AUTH).get_attribute("value")
-    dict_5g["SSID"] = self.driver.find_element_by_name(
-        BAND_5G_SSID).get_attribute("value")
-    if dict_5g["security"] == "psk2" or dict_5g["security"] == "sae":
-      dict_5g["password"] = self.driver.find_element_by_name(
-          BAND_5G_PSK).get_attribute("value")
-    elif dict_5g["security"] == "wpa2":
-      dict_5g["radius_ip_addr"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_IP).get_attribute("value")
-      dict_5g["radius_port"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_PORT).get_attribute("value")
-      dict_5g["radius_secret"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_KEY).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("5g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_5G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["5g"] = dict_5g.copy()
-    self.ap_settings["5g"]["channel"] = channel
-
-  def _update_ap_settings_6g_band(self):
-    """Read settings configured on 6g band.
-
-    Parameters Updated:
-      security type: wpa3-owe, wpa3-sae.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if sae network).
-      channel: 6G band channel.
-    """
-    dict_6g = {}
-    dict_6g["security"] = self.driver.find_element_by_name(
-        BAND_6G_AUTH).get_attribute("value")
-    dict_6g["SSID"] = self.driver.find_element_by_name(
-        BAND_6G_SSID).get_attribute("value")
-    if dict_6g["security"] == "sae":
-      dict_6g["password"] = self.driver.find_element_by_name(
-          BAND_6G_PSK).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("6g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_6G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["6g"] = dict_6g.copy()
-    self.ap_settings["6g"]["channel"] = channel
-
-  def _update_ap_settings(self):
-    """Read AP settings of 2G, 5G and 6G bands.
-
-    This method reads the wifi network currently configured on any particular
-    band. The settings are updated to self.ap_settings object.
-    """
-    self.driver.refresh()
-    self._update_ap_settings_2g_band()
-    self._update_ap_settings_5g_band()
-    self._update_ap_settings_6g_band()
-
-  def _get_webdriver_elements_for_channels(self, band):
-    """Return webdriver elements for the band to configure channel.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      channel field for the specific band.
-    """
-    channel_field = BAND_2G_CHANNEL
-    if band == "5g":
-      channel_field = BAND_5G_CHANNEL
-    elif band == "6g":
-      channel_field = BAND_6G_CHANNEL
-    return channel_field
-
-  def _set_channel(self, band, channel):
-    """Configure channel on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      channel: int, Channel to set.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if (band == "2g" and channel not in CHANNELS_2G) or (
-        band == "5g" and
-        channel not in CHANNELS_5G) or (band == "6g" and
-                                        channel not in CHANNELS_6G):
-      raise ValueError("Channel %s is not supported in band %s" %
-                       (channel, band))
-    channel_field = self._get_webdriver_elements_for_channels(band)
-    channels_val_dict = CHANNELS_6G
-    if band == "2g":
-      channels_val_dict = CHANNELS_2G
-    elif band == "5g":
-      channels_val_dict = CHANNELS_5G
-    channel = channels_val_dict[channel]
-
-    # Set channel
-    if self.driver.find_element_by_name(channel_field).get_attribute(
-        "value") != channel:
-      css_selector = "select[name=%s]" % channel_field
-      Select(self.driver.find_element_by_css_selector(
-          css_selector)).select_by_value(channel)
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-  def _configure_personal_network(self, band, auth, ssid=None, password=None):
-    """Configure wpa3 sae/wpa2 psk network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      auth: str, WPA2 PSK or WPA3 SAE security.
-      ssid: str, ssid of the wifi network.
-      password: str, password of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g" and auth == "psk2":
-      raise ValueError("AP doesn't support WPA2 PSK on 6g band.")
-    (auth_field, ssid_field,
-     psk_field) = self._get_webdriver_elements_for_personal_auth(band)
-
-    # configure personal network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value(auth)
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-    if password:
-      self.driver.find_element_by_name(psk_field).clear()
-      self.driver.find_element_by_name(psk_field).send_keys(password)
-
-  def _configure_open_owe_network(self, band, auth, ssid=None):
-    """Configure wpa3 owe/open network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      auth: str, WPA2 PSK or WPA3 SAE security.
-      ssid: str, ssid of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g" and auth == "open":
-      raise ValueError("AP doesn't support open network on 6g band.")
-    if (band == "2g" or band == "5g") and auth == "owe":
-      raise ValueError("AP doesn't support OWE on 2g and 5g bands.")
-    (auth_field, ssid_field,
-     _) = self._get_webdriver_elements_for_personal_auth(band)
-
-    # Configure wifi network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value(auth)
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-
-  def _configure_wpa2_ent_network(self, band, radius_ip, radius_port,
-                                  radius_secret, ssid=None):
-    """Configure wpa2 ent network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g.
-      radius_ip: str, radius server ip addr.
-      radius_port: str, radius server port number.
-      radius_secret: str, radius server secret.
-      ssid: str, ssid of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g":
-      raise ValueError("6GHz doesn't support enterprise network on this AP.")
-    (auth_field, ssid_field,
-     _) = self._get_webdriver_elements_for_personal_auth(band)
-    (rad_ip_field, rad_port_field,
-     rad_key_field) = self._get_webdriver_elements_for_ent_auth(band)
-
-    # Set enterprise network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value("wpa2")
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-    self.driver.find_element_by_name(rad_ip_field).clear()
-    self.driver.find_element_by_name(rad_ip_field).send_keys(radius_ip)
-    self.driver.find_element_by_name(rad_port_field).clear()
-    self.driver.find_element_by_name(rad_port_field).send_keys(radius_port)
-    self.driver.find_element_by_name(rad_key_field).clear()
-    self.driver.find_element_by_name(rad_key_field).send_keys(radius_secret)
-
-  def _get_webdriver_elements_for_personal_auth(self, band):
-    """Return webdriver elements for the band to configure personal auth.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      tuple of auth, ssid, psk field for the band.
-    """
-    auth_field = BAND_2G_AUTH
-    ssid_field = BAND_2G_SSID
-    psk_field = BAND_2G_PSK
-    if band == "5g":
-      auth_field = BAND_5G_AUTH
-      ssid_field = BAND_5G_SSID
-      psk_field = BAND_5G_PSK
-    elif band == "6g":
-      auth_field = BAND_6G_AUTH
-      ssid_field = BAND_6G_SSID
-      psk_field = BAND_6G_PSK
-    return (auth_field, ssid_field, psk_field)
-
-  def _get_webdriver_elements_for_ent_auth(self, band):
-    """Return webdriver elements for the band to configure ent auth.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      tuple of radius server IP, port, secret for the band.
-    """
-    rad_ip_field = BAND_2G_RAD_IP
-    rad_port_field = BAND_2G_RAD_PORT
-    rad_key_field = BAND_2G_RAD_KEY
-    if band == "5g":
-      rad_ip_field = BAND_5G_RAD_IP
-      rad_port_field = BAND_5G_RAD_PORT
-      rad_key_field = BAND_5G_RAD_KEY
-    return (rad_ip_field, rad_port_field, rad_key_field)
-
-  ### Methods to configure AP ###
-
-  def set_channel_and_apply(self, band, channel):
-    """Set channel for specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      channel: int, Channel to set.
-    """
-    # Go back to General tab in advanced settings
-    self.driver.find_element_by_id(GENERAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-    channel_field = self._get_webdriver_elements_for_channels(band)
-    self._set_channel(band, channel)
-    self.driver.find_element_by_id(APPLY_BUTTON_ID).click()
-    time.sleep(BROWSER_WAIT_LONG_TIMEOUT)
-    self._wait_for_web_element(self.driver.find_element_by_name,
-                               channel_field, "value", channel)
-    self._update_ap_settings()
-
-  def get_configured_channel(self, band):
-    """Get the channel configured on specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: eg, 5g, 6g.
-
-    Returns:
-      Channel configured on the band.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    return self.ap_settings[band]["channel"]
-
-  def configure_ap(self, network_dict):
-    """Configure AP with settings for different bands.
-
-    Args:
-      network_dict: dict, dictionary that holds configuration for each band.
-    """
-    # Go back to General tab in advanced settings
-    self.driver.refresh()
-    self.driver.find_element_by_id(GENERAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-    # configure wireless settings
-    self.log.info("Network dictionary: %s" % network_dict)
-    for band in network_dict:
-      security = network_dict[band]["security"]
-      ssid = network_dict[band]["SSID"] if "SSID" in network_dict[
-          band] else None
-      password = network_dict[band]["password"] if "password" in network_dict[
-          band] else None
-      if security == "open" or security == "owe":
-        self._configure_open_owe_network(band, security, ssid)
-      elif security == "psk2" or security == "sae":
-        self._configure_personal_network(band, security, ssid, password)
-      elif network_dict[band]["security"] == "wpa2":
-        self._configure_wpa2_ent_network(
-            band,
-            network_dict[band]["radius_server_ip"],
-            network_dict[band]["radius_server_port"],
-            network_dict[band]["radius_server_secret"],
-            ssid)
-
-    for band in network_dict:
-      if "channel" in network_dict[band]:
-        self._set_channel(band, network_dict[band]["channel"])
-    self.driver.find_element_by_id(APPLY_BUTTON_ID).click()
-    time.sleep(BROWSER_WAIT_LONG_TIMEOUT)
-
-    # update ap settings
-    self._update_ap_settings()
-
-    # configure hidden or 11ax mode
-    for band in network_dict:
-      apply_settings = False
-      if "hidden" in network_dict[band]:
-        res = self._configure_hidden_network(band, network_dict[band]["hidden"])
-        apply_settings = apply_settings or res
-      if "11ax" in network_dict[band]:
-        res = self._configure_11ax_mode(band, network_dict[band]["11ax"])
-        apply_settings = apply_settings or res
-      if apply_settings:
-        self.driver.find_element_by_id(APPLY_BUTTON).click()
-        time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-
-  def get_wifi_network(self, band):
-    """Get wifi network configured on the AP for the specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-
-    Returns:
-      Wifi network as a dictionary.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    wifi_network = {}
-    wifi_network["SSID"] = self.ap_settings[band]["SSID"]
-    if "password" in self.ap_settings[band]:
-      wifi_network["password"] = self.ap_settings[band]["password"]
-    security = self.ap_settings[band]["security"]
-    if security == "sae" or security == "owe":
-      wifi_network["security"] = security
-    return wifi_network
-
-  def _configure_hidden_network(self, band, val):
-    """Configure hidden network for a specific band.
-
-    Args:
-      band: str, Wifi band to configure hidden network.
-      val: str, String value to configure.
-
-    Returns:
-      True if settings applied, False if not.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-
-    # Go to Professional tab in advanced settings
-    self.driver.find_element_by_id(PROFESSIONAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, HE_MODE_ID)
-
-    # Select the requested band from the drop down menu
-    css_selector = "select[name=%s]" % WL_UNIT
-    Select(
-        self.driver.find_element_by_css_selector(css_selector)).select_by_value(
-            WL_BAND_VALUE[band])  # (TODO: gmoturu@) find if selection worked
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-    # Configure hidden network
-    state = True if val == "1" else False
-    return_result = False
-    if self.driver.find_element_by_name(WL_CLOSED).is_selected() != state:
-      css_selector = "input[name='%s'][value='%s']" % (WL_CLOSED, val)
-      self.driver.find_element_by_css_selector(css_selector).click()
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-      return_result = True
-
-    return return_result
-
-  def configure_hidden_network_and_apply(self, band, state=True):
-    """Configure hidden network for a specific band.
-
-    Args:
-      band: str, Wifi band to configure hidden network.
-      state: bool, Set the wifi network as hidden if True, False if not.
-    """
-    val = "1" if state else "0"
-    if self._configure_hidden_network(band, val):
-      self.driver.find_element_by_id(APPLY_BUTTON).click()
-      time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-      if self.driver.find_element_by_name(WL_CLOSED).is_selected() != state:
-        raise ValueError("Failed to configure hidden network on band: %s" % band)
-
-      # Go back to General tab in advanced settings
-      self.driver.find_element_by_id(GENERAL_TAB).click()
-      self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-  def _configure_11ax_mode(self, band, val):
-    """Configure 11ax mode on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      val: str, String value to configure.
-
-    Returns:
-      True if settings are applied, False if not.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-
-    # Go to Professional tab in advanced settings
-    self.driver.find_element_by_id(PROFESSIONAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, HE_MODE_ID)
-
-    # Select the requested band from the drop down menu
-    css_selector = "select[name=%s]" % WL_UNIT
-    Select(
-        self.driver.find_element_by_css_selector(css_selector)).select_by_value(
-            WL_BAND_VALUE[band])  # (TODO: gmoturu@) find if selection worked
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-    # Configure 11ax
-    return_result = False
-    if self.driver.find_element_by_name(WL_11AX).get_attribute(
-        "value") != val:
-      css_selector = "select[name=%s]" % WL_11AX
-      Select(self.driver.find_element_by_css_selector(
-          css_selector)).select_by_value(val)
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-      return_result = True
-
-    return return_result
-
-  def configure_11ax_mode_and_apply(self, band, state=True):
-    """Configure 11ax mode on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      state: bool, Enable 11ax if True, disable if False
-    """
-    val = "1" if state else "0"
-    if self._configure_11ax_mode(band, val):
-      self.driver.find_element_by_id(APPLY_BUTTON).click()
-      time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-      self._wait_for_web_element(self.driver.find_element_by_name, WL_11AX,
-                                 "value", val)
-
-      # Go back to General tab in advanced settings
-      self.driver.find_element_by_id(GENERAL_TAB).click()
-      self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-  def reset_to_default_ap_settings(self):
-    """Reset AP to the default settings."""
-    if self.default_ap_settings != self.ap_settings:
-      self.configure_ap(self.default_ap_settings)
-
diff --git a/src/antlion/controllers/attenuator.py b/src/antlion/controllers/attenuator.py
index ff68f5b..440e90a 100644
--- a/src/antlion/controllers/attenuator.py
+++ b/src/antlion/controllers/attenuator.py
@@ -20,21 +20,23 @@
 from antlion.keys import Config
 from antlion.libs.proc import job
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'Attenuator'
-ACTS_CONTROLLER_REFERENCE_NAME = 'attenuators'
+MOBLY_CONTROLLER_CONFIG_NAME = "Attenuator"
+ACTS_CONTROLLER_REFERENCE_NAME = "attenuators"
 _ATTENUATOR_OPEN_RETRIES = 3
 
 
 def create(configs):
     objs = []
     for c in configs:
-        attn_model = c['Model']
+        attn_model = c["Model"]
         # Default to telnet.
-        protocol = c.get('Protocol', 'telnet')
-        module_name = 'antlion.controllers.attenuator_lib.%s.%s' % (attn_model,
-                                                                 protocol)
+        protocol = c.get("Protocol", "telnet")
+        module_name = "antlion.controllers.attenuator_lib.%s.%s" % (
+            attn_model,
+            protocol,
+        )
         module = importlib.import_module(module_name)
-        inst_cnt = c['InstrumentCount']
+        inst_cnt = c["InstrumentCount"]
         attn_inst = module.AttenuatorInstrument(inst_cnt)
         attn_inst.model = attn_model
 
@@ -45,27 +47,30 @@
             try:
                 attn_inst.open(ip_address, port)
             except Exception as e:
-                logging.error('Attempt %s to open connection to attenuator '
-                              'failed: %s' % (attempt_number, e))
+                logging.error(
+                    "Attempt %s to open connection to attenuator "
+                    "failed: %s" % (attempt_number, e)
+                )
                 if attempt_number == _ATTENUATOR_OPEN_RETRIES:
-                    ping_output = job.run('ping %s -c 1 -w 1' % ip_address,
-                                          ignore_status=True)
+                    ping_output = job.run(
+                        "ping %s -c 1 -w 1" % ip_address, ignore_status=True
+                    )
                     if ping_output.exit_status == 1:
-                        logging.error('Unable to ping attenuator at %s' %
-                                      ip_address)
+                        logging.error("Unable to ping attenuator at %s" % ip_address)
                     else:
-                        logging.error('Able to ping attenuator at %s' %
-                                      ip_address)
-                        job.run('echo "q" | telnet %s %s' % (ip_address, port),
-                                ignore_status=True)
+                        logging.error("Able to ping attenuator at %s" % ip_address)
+                        job.run(
+                            'echo "q" | telnet %s %s' % (ip_address, port),
+                            ignore_status=True,
+                        )
                     raise
         for i in range(inst_cnt):
             attn = Attenuator(attn_inst, idx=i)
-            if 'Paths' in c:
+            if "Paths" in c:
                 try:
-                    setattr(attn, 'path', c['Paths'][i])
+                    setattr(attn, "path", c["Paths"][i])
                 except IndexError:
-                    logging.error('No path specified for attenuator %d.', i)
+                    logging.error("No path specified for attenuator %d.", i)
                     raise
             objs.append(attn)
     return objs
@@ -84,7 +89,7 @@
     for attenuator in attenuators:
         info = {
             "Address": attenuator.instrument.address,
-            "Attenuator_Port": attenuator.idx
+            "Attenuator_Port": attenuator.idx,
         }
         device_info.append(info)
     return device_info
@@ -95,8 +100,7 @@
         attn.instrument.close()
 
 
-def get_attenuators_for_device(device_attenuator_configs, attenuators,
-                               attenuator_key):
+def get_attenuators_for_device(device_attenuator_configs, attenuators, attenuator_key):
     """Gets the list of attenuators associated to a specified device and builds
     a list of the attenuator objects associated to the ip address in the
     device's section of the ACTS config and the Attenuator's IP address.  In the
@@ -155,9 +159,10 @@
     for device_attenuator_config in device_attenuator_configs:
         for attenuator_port in device_attenuator_config[attenuator_key]:
             for attenuator in attenuators:
-                if (attenuator.instrument.address ==
-                        device_attenuator_config['Address']
-                        and attenuator.idx is attenuator_port):
+                if (
+                    attenuator.instrument.address == device_attenuator_config["Address"]
+                    and attenuator.idx is attenuator_port
+                ):
                     attenuator_list.append(attenuator)
     return attenuator_list
 
@@ -175,7 +180,7 @@
 
 
 class InvalidDataError(AttenuatorError):
-    """"Raised when an unexpected result is seen on the transport layer.
+    """ "Raised when an unexpected result is seen on the transport layer.
 
     When this exception is seen, closing and re-opening the link to the
     attenuator instrument is probably necessary. Something has gone wrong in
@@ -202,6 +207,7 @@
     constructors. Outside of setup/teardown, devices should be accessed via
     this generic "interface".
     """
+
     model = None
     INVALID_MAX_ATTEN = 999.9
 
@@ -219,8 +225,7 @@
         """
 
         if type(self) is AttenuatorInstrument:
-            raise NotImplementedError(
-                'Base class should not be instantiated directly!')
+            raise NotImplementedError("Base class should not be instantiated directly!")
 
         self.num_atten = num_atten
         self.max_atten = AttenuatorInstrument.INVALID_MAX_ATTEN
@@ -238,7 +243,7 @@
                 bounds values to 0 or max_atten.
             retry: if True, command will be retried if possible
         """
-        raise NotImplementedError('Base class should not be called directly!')
+        raise NotImplementedError("Base class should not be called directly!")
 
     def get_atten(self, idx, retry=False):
         """Returns the current attenuation of the attenuator at index idx.
@@ -251,7 +256,7 @@
         Returns:
             The current attenuation value as a floating point value
         """
-        raise NotImplementedError('Base class should not be called directly!')
+        raise NotImplementedError("Base class should not be called directly!")
 
 
 class Attenuator(object):
@@ -281,15 +286,14 @@
             IndexError if the index is out of range.
         """
         if not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError('Must provide an Attenuator Instrument Ref')
+            raise TypeError("Must provide an Attenuator Instrument Ref")
         self.model = instrument.model
         self.instrument = instrument
         self.idx = idx
         self.offset = offset
 
         if self.idx >= instrument.num_atten:
-            raise IndexError(
-                'Attenuator index out of range for attenuator instrument')
+            raise IndexError("Attenuator index out of range for attenuator instrument")
 
     def set_atten(self, value, strict=True, retry=False):
         """Sets the attenuation.
@@ -305,13 +309,11 @@
             ValueError if value + offset is greater than the maximum value.
         """
         if value + self.offset > self.instrument.max_atten and strict:
-            raise ValueError(
-                'Attenuator Value+Offset greater than Max Attenuation!')
+            raise ValueError("Attenuator Value+Offset greater than Max Attenuation!")
 
-        self.instrument.set_atten(self.idx,
-                                  value + self.offset,
-                                  strict=strict,
-                                  retry=retry)
+        self.instrument.set_atten(
+            self.idx, value + self.offset, strict=strict, retry=retry
+        )
 
     def get_atten(self, retry=False):
         """Returns the attenuation as a float, normalized by the offset."""
@@ -320,7 +322,7 @@
     def get_max_atten(self):
         """Returns the max attenuation as a float, normalized by the offset."""
         if self.instrument.max_atten == AttenuatorInstrument.INVALID_MAX_ATTEN:
-            raise ValueError('Invalid Max Attenuator Value')
+            raise ValueError("Invalid Max Attenuator Value")
 
         return self.instrument.max_atten - self.offset
 
@@ -336,7 +338,7 @@
     small loops scattered throughout user code.
     """
 
-    def __init__(self, name=''):
+    def __init__(self, name=""):
         """This constructor for AttenuatorGroup
 
         Args:
@@ -366,7 +368,7 @@
             Requires a valid AttenuatorInstrument to be passed in.
         """
         if not instrument or not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError('Must provide an Attenuator Instrument Ref')
+            raise TypeError("Must provide an Attenuator Instrument Ref")
 
         if type(indices) is range or type(indices) is list:
             for i in indices:
@@ -384,7 +386,7 @@
             TypeError if the attenuator parameter is not an Attenuator.
         """
         if not isinstance(attenuator, Attenuator):
-            raise TypeError('Must provide an Attenuator')
+            raise TypeError("Must provide an Attenuator")
 
         self.attens.append(attenuator)
 
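# Editor's note: a minimal sketch of an Attenuator controller entry and how it
# is consumed, based on the keys read by create() above (Model, Protocol,
# InstrumentCount, Paths). The "Address"/"Port" keys are assumptions inferred
# from attn_inst.open(ip_address, port) and get_info(); a reachable instrument
# at the given address is assumed.
from antlion.controllers import attenuator

example_configs = [
    {
        "Model": "minicircuits",  # resolves antlion.controllers.attenuator_lib.minicircuits.<protocol>
        "Protocol": "telnet",  # optional, defaults to "telnet"
        "InstrumentCount": 4,  # one Attenuator object is created per channel
        "Address": "192.168.1.2",  # assumed key: host passed to AttenuatorInstrument.open()
        "Port": 23,  # assumed key: port passed to AttenuatorInstrument.open()
        "Paths": ["AP-2G", "AP-5G", "DUT-2G", "DUT-5G"],  # optional channel labels
    }
]

attenuators = attenuator.create(example_configs)
attenuators[0].set_atten(10.0)  # 10 dB on channel 0, bounds-checked against max_atten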
diff --git a/src/antlion/controllers/attenuator_lib/_tnhelper.py b/src/antlion/controllers/attenuator_lib/_tnhelper.py
index 643ccdf..61b4193 100644
--- a/src/antlion/controllers/attenuator_lib/_tnhelper.py
+++ b/src/antlion/controllers/attenuator_lib/_tnhelper.py
@@ -26,7 +26,7 @@
 
 
 def _ascii_string(uc_string):
-    return str(uc_string).encode('ASCII')
+    return str(uc_string).encode("ASCII")
 
 
 class _TNHelper(object):
@@ -35,10 +35,8 @@
     It should only be used by those implementation control libraries and not by
     any user code directly.
     """
-    def __init__(self,
-                 tx_cmd_separator='\n',
-                 rx_cmd_separator='\n',
-                 prompt=''):
+
+    def __init__(self, tx_cmd_separator="\n", rx_cmd_separator="\n", prompt=""):
         self._tn = None
         self._ip_address = None
         self._port = None
@@ -77,37 +75,37 @@
             True when telnet server is reachable and telnet connection has been
             successfully reopened
         """
-        logging.debug('Diagnosing telnet connection')
+        logging.debug("Diagnosing telnet connection")
         try:
-            job_result = job.run('ping {} -c 5 -i 0.2'.format(
-                self._ip_address))
+            job_result = job.run("ping {} -c 5 -i 0.2".format(self._ip_address))
         except:
             logging.error("Unable to ping telnet server.")
             return False
         ping_output = job_result.stdout
-        if not re.search(r' 0% packet loss', ping_output):
-            logging.error('Ping Packets Lost. Result: {}'.format(ping_output))
+        if not re.search(r" 0% packet loss", ping_output):
+            logging.error("Ping Packets Lost. Result: {}".format(ping_output))
             return False
         try:
             self.close()
         except:
-            logging.error('Cannot close telnet connection.')
+            logging.error("Cannot close telnet connection.")
             return False
         try:
             self.open(self._ip_address, self._port)
         except:
-            logging.error('Cannot reopen telnet connection.')
+            logging.error("Cannot reopen telnet connection.")
             return False
-        logging.debug('Telnet connection likely recovered')
+        logging.debug("Telnet connection likely recovered")
         return True
 
     def cmd(self, cmd_str, wait_ret=True, retry=False):
         if not isinstance(cmd_str, str):
-            raise TypeError('Invalid command string', cmd_str)
+            raise TypeError("Invalid command string", cmd_str)
 
         if not self.is_open():
             raise attenuator.InvalidOperationError(
-                'Telnet connection not open for commands')
+                "Telnet connection not open for commands"
+            )
 
         cmd_str.strip(self.tx_cmd_separator)
         self._tn.read_until(_ascii_string(self.prompt), 2)
@@ -117,23 +115,25 @@
             return None
 
         match_idx, match_val, ret_text = self._tn.expect(
-            [_ascii_string('\S+' + self.rx_cmd_separator)], 1)
+            [_ascii_string("\S+" + self.rx_cmd_separator)], 1
+        )
 
-        logging.debug('Telnet Command: {}'.format(cmd_str))
-        logging.debug('Telnet Reply: ({},{},{})'.format(
-            match_idx, match_val, ret_text))
+        logging.debug("Telnet Command: {}".format(cmd_str))
+        logging.debug("Telnet Reply: ({},{},{})".format(match_idx, match_val, ret_text))
 
         if match_idx == -1:
             telnet_recovered = self.diagnose_telnet()
             if telnet_recovered and retry:
-                logging.debug('Retrying telnet command once.')
+                logging.debug("Retrying telnet command once.")
                 return self.cmd(cmd_str, wait_ret, retry=False)
             else:
                 raise attenuator.InvalidDataError(
-                    'Telnet command failed to return valid data')
+                    "Telnet command failed to return valid data"
+                )
 
         ret_text = ret_text.decode()
-        ret_text = ret_text.strip(self.tx_cmd_separator +
-                                  self.rx_cmd_separator + self.prompt)
+        ret_text = ret_text.strip(
+            self.tx_cmd_separator + self.rx_cmd_separator + self.prompt
+        )
 
         return ret_text
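# Editor's note: a brief sketch of how the concrete telnet drivers below use
# _TNHelper. The command strings, address, and port are illustrative only, and
# a reachable telnet-controlled attenuator is assumed.
from antlion.controllers.attenuator_lib import _tnhelper

helper = _tnhelper._TNHelper(
    tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
)
helper.open("192.168.1.2", 23)
model_info = helper.cmd("MN?", retry=True)  # retried once after diagnose_telnet() on a bad reply
helper.cmd("CHAN:1:SETATT:10.0", wait_ret=False)  # send without waiting for a reply
helper.close()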
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
index 9d38214..4c34f4b 100644
--- a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
+++ b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """
 Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
 
@@ -30,13 +29,12 @@
 
 
 class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-
     def __init__(self, num_atten=0):
         super(AttenuatorInstrument, self).__init__(num_atten)
 
-        self._tnhelper = _tnhelper._TNHelper(tx_cmd_separator='\r\n',
-                                             rx_cmd_separator='\r\n',
-                                             prompt='>')
+        self._tnhelper = _tnhelper._TNHelper(
+            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
+        )
         self.properties = None
         self.address = None
 
@@ -52,19 +50,22 @@
         self._tnhelper.open(host, port)
 
         # work around a bug in IO, but this is a good thing to do anyway
-        self._tnhelper.cmd('*CLS', False)
+        self._tnhelper.cmd("*CLS", False)
         self.address = host
 
         if self.num_atten == 0:
-            self.num_atten = int(self._tnhelper.cmd('RFCONFIG? CHAN'))
+            self.num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN"))
 
-        configstr = self._tnhelper.cmd('RFCONFIG? ATTN 1')
+        configstr = self._tnhelper.cmd("RFCONFIG? ATTN 1")
 
-        self.properties = dict(zip(['model', 'max_atten', 'min_step',
-                                    'unknown', 'unknown2', 'cfg_str'],
-                                   configstr.split(", ", 5)))
+        self.properties = dict(
+            zip(
+                ["model", "max_atten", "min_step", "unknown", "unknown2", "cfg_str"],
+                configstr.split(", ", 5),
+            )
+        )
 
-        self.max_atten = float(self.properties['max_atten'])
+        self.max_atten = float(self.properties["max_atten"])
 
     def is_open(self):
         """Returns True if the AttenuatorInstrument has an open connection."""
@@ -95,17 +96,15 @@
                 attenuation value.
         """
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         if idx >= self.num_atten:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if value > self.max_atten:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
+            raise ValueError("Attenuator value out of range!", self.max_atten, value)
 
-        self._tnhelper.cmd('ATTN ' + str(idx + 1) + ' ' + str(value), False)
+        self._tnhelper.cmd("ATTN " + str(idx + 1) + " " + str(value), False)
 
     def get_atten(self, idx, **_):
         """Returns the current attenuation of the attenuator at the given index.
@@ -120,12 +119,12 @@
             the current attenuation value as a float
         """
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         #       Potentially redundant safety check removed for the moment
         #       if idx >= self.num_atten:
         #           raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
-        atten_val = self._tnhelper.cmd('ATTN? ' + str(idx + 1))
+        atten_val = self._tnhelper.cmd("ATTN? " + str(idx + 1))
 
         return float(atten_val)
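# Editor's note: a sketch of the command sequence the Aeroflex driver above
# issues over telnet; the address, port, and attenuation values are
# illustrative, and a reachable 832X/833X unit is assumed.
from antlion.controllers.attenuator_lib.aeroflex.telnet import AttenuatorInstrument

inst = AttenuatorInstrument(num_atten=0)  # channel count is read from "RFCONFIG? CHAN"
inst.open("192.168.1.3", 10001)  # sends "*CLS", "RFCONFIG? CHAN", "RFCONFIG? ATTN 1"
inst.set_atten(0, 20.0)  # sends "ATTN 1 20.0" (device channels are one-based)
print(inst.get_atten(0))  # sends "ATTN? 1" and parses the reply as a float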
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/http.py b/src/antlion/controllers/attenuator_lib/minicircuits/http.py
index c84f64b..61c1e29 100644
--- a/src/antlion/controllers/attenuator_lib/minicircuits/http.py
+++ b/src/antlion/controllers/attenuator_lib/minicircuits/http.py
@@ -57,18 +57,22 @@
         self._timeout = timeout
         self.address = host
 
-        att_req = urllib.request.urlopen('http://{}:{}/MN?'.format(
-            self._ip_address, self._port))
-        config_str = att_req.read().decode('utf-8').strip()
-        if not config_str.startswith('MN='):
+        att_req = urllib.request.urlopen(
+            "http://{}:{}/MN?".format(self._ip_address, self._port)
+        )
+        config_str = att_req.read().decode("utf-8").strip()
+        if not config_str.startswith("MN="):
             raise attenuator.InvalidDataError(
-                'Attenuator returned invalid data. Attenuator returned: {}'.
-                format(config_str))
+                "Attenuator returned invalid data. Attenuator returned: {}".format(
+                    config_str
+                )
+            )
 
-        config_str = config_str[len('MN='):]
+        config_str = config_str[len("MN=") :]
         self.properties = dict(
-            zip(['model', 'max_freq', 'max_atten'], config_str.split('-', 2)))
-        self.max_atten = float(self.properties['max_atten'])
+            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
+        )
+        self.max_atten = float(self.properties["max_atten"])
 
     def is_open(self):
         """Returns True if the AttenuatorInstrument has an open connection.
@@ -104,27 +108,28 @@
             expected output.
         """
         if not (0 <= idx < self.num_atten):
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if value > self.max_atten and strict:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
+            raise ValueError("Attenuator value out of range!", self.max_atten, value)
         # The actual device uses one-based index for channel numbers.
         adjusted_value = min(max(0, value), self.max_atten)
         att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:SETATT:{}'.format(self._ip_address,
-                                                    self._port, idx + 1,
-                                                    adjusted_value),
-            timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8').strip()
-        if att_resp != '1':
+            "http://{}:{}/CHAN:{}:SETATT:{}".format(
+                self._ip_address, self._port, idx + 1, adjusted_value
+            ),
+            timeout=self._timeout,
+        )
+        att_resp = att_req.read().decode("utf-8").strip()
+        if att_resp != "1":
             if retry:
                 self.set_atten(idx, value, strict, retry=False)
             else:
                 raise attenuator.InvalidDataError(
-                    'Attenuator returned invalid data. Attenuator returned: {}'
-                    .format(att_resp))
+                    "Attenuator returned invalid data. Attenuator returned: {}".format(
+                        att_resp
+                    )
+                )
 
     def get_atten(self, idx, retry=False, **_):
         """Returns the current attenuation of the attenuator at the given index.
@@ -141,12 +146,12 @@
             the current attenuation value as a float
         """
         if not (0 <= idx < self.num_atten):
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
         att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:ATT?'.format(self._ip_address, self.port, idx + 1),
-            timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8').strip()
+            "http://{}:{}/CHAN:{}:ATT?".format(self._ip_address, self.port, idx + 1),
+            timeout=self._timeout,
+        )
+        att_resp = att_req.read().decode("utf-8").strip()
         try:
             atten_val = float(att_resp)
         except:
@@ -154,6 +159,8 @@
                 self.get_atten(idx, retry=False)
             else:
                 raise attenuator.InvalidDataError(
-                    'Attenuator returned invalid data. Attenuator returned: {}'
-                    .format(att_resp))
+                    "Attenuator returned invalid data. Attenuator returned: {}".format(
+                        att_resp
+                    )
+                )
         return atten_val
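# Editor's note: a sketch of the plain-HTTP protocol this driver speaks, using
# the same endpoints as open()/set_atten()/get_atten() above. The host, port,
# channel, and example reply are illustrative; a reachable Mini-Circuits unit
# is assumed.
import urllib.request

host, port, channel = "192.168.1.4", 80, 1  # hypothetical device
base = "http://{}:{}".format(host, port)

model = urllib.request.urlopen(base + "/MN?").read().decode("utf-8").strip()
# A reply such as "MN=RCDAT-6000-110" is split after "MN=" into model,
# max frequency, and max attenuation.

resp = urllib.request.urlopen(base + "/CHAN:{}:SETATT:{}".format(channel, 10.0)).read()
assert resp.decode("utf-8").strip() == "1"  # the unit answers "1" on success

atten = urllib.request.urlopen(base + "/CHAN:{}:ATT?".format(channel)).read()
print(float(atten.decode("utf-8").strip()))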
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
index 538532f..ad9f0ce 100644
--- a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
+++ b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
@@ -40,9 +40,9 @@
 
     def __init__(self, num_atten=0):
         super(AttenuatorInstrument, self).__init__(num_atten)
-        self._tnhelper = _tnhelper._TNHelper(tx_cmd_separator='\r\n',
-                                             rx_cmd_separator='\r\n',
-                                             prompt='')
+        self._tnhelper = _tnhelper._TNHelper(
+            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
+        )
         self.address = None
 
     def __del__(self):
@@ -64,14 +64,15 @@
         if self.num_atten == 0:
             self.num_atten = 1
 
-        config_str = self._tnhelper.cmd('MN?')
+        config_str = self._tnhelper.cmd("MN?")
 
-        if config_str.startswith('MN='):
-            config_str = config_str[len('MN='):]
+        if config_str.startswith("MN="):
+            config_str = config_str[len("MN=") :]
 
         self.properties = dict(
-            zip(['model', 'max_freq', 'max_atten'], config_str.split('-', 2)))
-        self.max_atten = float(self.properties['max_atten'])
+            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
+        )
+        self.max_atten = float(self.properties["max_atten"])
 
     def is_open(self):
         """Returns True if the AttenuatorInstrument has an open connection."""
@@ -107,19 +108,16 @@
         """
 
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         if idx >= self.num_atten:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if value > self.max_atten and strict:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
+            raise ValueError("Attenuator value out of range!", self.max_atten, value)
         # The actual device uses one-based index for channel numbers.
         adjusted_value = min(max(0, value), self.max_atten)
-        self._tnhelper.cmd('CHAN:%s:SETATT:%s' % (idx + 1, adjusted_value),
-                           retry=retry)
+        self._tnhelper.cmd("CHAN:%s:SETATT:%s" % (idx + 1, adjusted_value), retry=retry)
 
     def get_atten(self, idx, retry=False):
         """Returns the current attenuation of the attenuator at the given index.
@@ -135,16 +133,14 @@
             the current attenuation value as a float
         """
         if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
+            raise attenuator.InvalidOperationError("Connection not open!")
 
         if idx >= self.num_atten or idx < 0:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
+            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
 
         if self.num_atten == 1:
-            atten_val_str = self._tnhelper.cmd(':ATT?', retry=retry)
+            atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry)
         else:
-            atten_val_str = self._tnhelper.cmd('CHAN:%s:ATT?' % (idx + 1),
-                                               retry=retry)
+            atten_val_str = self._tnhelper.cmd("CHAN:%s:ATT?" % (idx + 1), retry=retry)
         atten_val = float(atten_val_str)
         return atten_val
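# Editor's note: a short usage sketch for the telnet Mini-Circuits driver
# above, highlighting that single-channel units are queried with ":ATT?" while
# multi-channel units use "CHAN:<n>:ATT?". Address, port, and values are
# illustrative; a reachable unit is assumed.
from antlion.controllers.attenuator_lib.minicircuits.telnet import AttenuatorInstrument

inst = AttenuatorInstrument(num_atten=1)
inst.open("192.168.1.5", 23)  # reads "MN?" to learn model, max frequency, and max attenuation
inst.set_atten(0, 30.0)  # sends "CHAN:1:SETATT:30.0"
print(inst.get_atten(0))  # single-channel unit, so ":ATT?" is sent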
diff --git a/src/antlion/controllers/bits.py b/src/antlion/controllers/bits.py
deleted file mode 100644
index 0a9ed21..0000000
--- a/src/antlion/controllers/bits.py
+++ /dev/null
@@ -1,470 +0,0 @@
-"""Module managing the required definitions for using the bits power monitor"""
-
-import logging
-import os
-import time
-import uuid
-
-from antlion import context
-from antlion.controllers import power_metrics
-from antlion.controllers import power_monitor
-from antlion.controllers.bits_lib import bits_client
-from antlion.controllers.bits_lib import bits_service
-from antlion.controllers.bits_lib import bits_service_config as bsc
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Bits'
-ACTS_CONTROLLER_REFERENCE_NAME = 'bitses'
-
-
-def create(configs):
-    return [Bits(index, config) for (index, config) in enumerate(configs)]
-
-
-def destroy(bitses):
-    for bits in bitses:
-        bits.teardown()
-
-
-def get_info(bitses):
-    return [bits.config for bits in bitses]
-
-
-class BitsError(Exception):
-    pass
-
-
-class _BitsCollection(object):
-    """Object that represents a bits collection
-
-    Attributes:
-        name: The name given to the collection.
-        markers_buffer: An array of un-flushed markers, each marker is
-        represented by a bi-dimensional tuple with the format
-        (<nanoseconds_since_epoch or datetime>, <text>).
-        monsoon_output_path: A path to store monsoon-like data if possible, Bits
-        uses this path to attempt data extraction in monsoon format, if this
-        parameter is left as None such extraction is not attempted.
-    """
-
-    def __init__(self, name, monsoon_output_path=None):
-        self.monsoon_output_path = monsoon_output_path
-        self.name = name
-        self.markers_buffer = []
-
-    def add_marker(self, timestamp, marker_text):
-        self.markers_buffer.append((timestamp, marker_text))
-
-
-def _transform_name(bits_metric_name):
-    """Transform bits metrics names to a more succinct version.
-
-    Examples of bits_metrics_name as provided by the client:
-    - default_device.slider.C1_30__PP0750_L1S_VDD_G3D_M_P:mA,
-    - default_device.slider.C1_30__PP0750_L1S_VDD_G3D_M_P:mW,
-    - default_device.Monsoon.Monsoon:mA,
-    - default_device.Monsoon.Monsoon:mW,
-    - <device>.<collector>.<rail>:<unit>
-
-    Args:
-        bits_metric_name: A bits metric name.
-
-    Returns:
-        For monsoon metrics, and for backwards compatibility:
-          Monsoon:mA -> avg_current,
-          Monsoon:mW -> avg_power,
-
-        For everything else:
-          <rail>:mW -> <rail/rail>_avg_current
-          <rail>:mW -> <rail/rail>_avg_power
-          ...
-    """
-    prefix, unit = bits_metric_name.split(':')
-    rail = prefix.split('.')[-1]
-
-    if 'mW' == unit:
-        suffix = 'avg_power'
-    elif 'mA' == unit:
-        suffix = 'avg_current'
-    elif 'mV' == unit:
-        suffix = 'avg_voltage'
-    else:
-        logging.warning('unknown unit type for unit %s' % unit)
-        suffix = ''
-
-    if 'Monsoon' == rail:
-        return suffix
-    elif suffix == '':
-        return rail
-    else:
-        return '%s_%s' % (rail, suffix)
-
-
-def _raw_data_to_metrics(raw_data_obj):
-    data = raw_data_obj['data']
-    metrics = []
-    for sample in data:
-        unit = sample['unit']
-        if 'Msg' == unit:
-            continue
-        elif 'mW' == unit:
-            unit_type = 'power'
-        elif 'mA' == unit:
-            unit_type = 'current'
-        elif 'mV' == unit:
-            unit_type = 'voltage'
-        else:
-            logging.warning('unknown unit type for unit %s' % unit)
-            continue
-
-        name = _transform_name(sample['name'])
-        avg = sample['avg']
-        metrics.append(power_metrics.Metric(avg, unit_type, unit, name=name))
-
-    return metrics
-
-
-def _get_single_file(registry, key):
-    if key not in registry:
-        return None
-    entry = registry[key]
-    if isinstance(entry, str):
-        return entry
-    if isinstance(entry, list):
-        return None if len(entry) == 0 else entry[0]
-    raise ValueError('registry["%s"] is of unsupported type %s for this '
-                     'operation. Supported types are str and list.' % (
-                         key, type(entry)))
-
-
-class Bits(object):
-
-    ROOT_RAIL_KEY = 'RootRail'
-    ROOT_RAIL_DEFAULT_VALUE = 'Monsoon:mA'
-
-    def __init__(self, index, config):
-        """Creates an instance of a bits controller.
-
-        Args:
-            index: An integer identifier for this instance, this allows to
-                tell apart different instances in the case where multiple
-                bits controllers are being used concurrently.
-            config: The config as defined in the ACTS  BiTS controller config.
-                Expected format is:
-                {
-                    // optional
-                    'Monsoon':   {
-                        'serial_num': <serial number:int>,
-                        'monsoon_voltage': <voltage:double>
-                    }
-                    // optional
-                    'Kibble': [
-                        {
-                            'board': 'BoardName1',
-                            'connector': 'A',
-                            'serial': 'serial_1'
-                        },
-                        {
-                            'board': 'BoardName2',
-                            'connector': 'D',
-                            'serial': 'serial_2'
-                        }
-                    ]
-                    // optional
-                    'RootRail': 'Monsoon:mA'
-                }
-        """
-        self.index = index
-        self.config = config
-        self._service = None
-        self._client = None
-        self._active_collection = None
-        self._collections_counter = 0
-        self._root_rail = config.get(self.ROOT_RAIL_KEY,
-                                     self.ROOT_RAIL_DEFAULT_VALUE)
-
-    def setup(self, *_, registry=None, **__):
-        """Starts a bits_service in the background.
-
-        This function needs to be called with either a registry or after calling
-        power_monitor.update_registry, and it needs to be called before any other
-        method in this class.
-
-        Args:
-            registry: A dictionary with files used by bits. Format:
-                {
-                    // required, string or list of strings
-                    bits_service: ['/path/to/bits_service']
-
-                    // required, string or list of strings
-                    bits_client: ['/path/to/bits.par']
-
-                    // needed for monsoon, string or list of strings
-                    lvpm_monsoon: ['/path/to/lvpm_monsoon.par']
-
-                    // needed for monsoon, string or list of strings
-                    hvpm_monsoon: ['/path/to/hvpm_monsoon.par']
-
-                    // needed for kibble, string or list of strings
-                    kibble_bin: ['/path/to/kibble.par']
-
-                    // needed for kibble, string or list of strings
-                    kibble_board_file: ['/path/to/phone_s.board']
-
-                    // optional, string or list of strings
-                    vm_file: ['/path/to/file.vm']
-                }
-
-                All fields in this dictionary can be either a string or a list
-                of strings. If lists are passed, only their first element is
-                taken into account. The reason for supporting lists but only
-                acting on their first element is for easier integration with
-                harnesses that handle resources as lists.
-        """
-        if registry is None:
-            registry = power_monitor.get_registry()
-        if 'bits_service' not in registry:
-            raise ValueError('No bits_service binary has been defined in the '
-                             'global registry.')
-        if 'bits_client' not in registry:
-            raise ValueError('No bits_client binary has been defined in the '
-                             'global registry.')
-
-        bits_service_binary = _get_single_file(registry, 'bits_service')
-        bits_client_binary = _get_single_file(registry, 'bits_client')
-        lvpm_monsoon_bin = _get_single_file(registry, 'lvpm_monsoon')
-        hvpm_monsoon_bin = _get_single_file(registry, 'hvpm_monsoon')
-        kibble_bin = _get_single_file(registry, 'kibble_bin')
-        kibble_board_file = _get_single_file(registry, 'kibble_board_file')
-        vm_file = _get_single_file(registry, 'vm_file')
-        config = bsc.BitsServiceConfig(self.config,
-                                       lvpm_monsoon_bin=lvpm_monsoon_bin,
-                                       hvpm_monsoon_bin=hvpm_monsoon_bin,
-                                       kibble_bin=kibble_bin,
-                                       kibble_board_file=kibble_board_file,
-                                       virtual_metrics_file=vm_file)
-        output_log = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            'bits_service_out_%s.txt' % self.index)
-        service_name = 'bits_config_%s' % self.index
-
-        self._active_collection = None
-        self._collections_counter = 0
-        self._service = bits_service.BitsService(config,
-                                                 bits_service_binary,
-                                                 output_log,
-                                                 name=service_name,
-                                                 timeout=3600 * 24)
-        self._service.start()
-        self._client = bits_client.BitsClient(bits_client_binary,
-                                              self._service,
-                                              config)
-        # this call makes sure that the client can interact with the server.
-        devices = self._client.list_devices()
-        logging.debug(devices)
-
-    def disconnect_usb(self, *_, **__):
-        self._client.disconnect_usb()
-
-    def connect_usb(self, *_, **__):
-        self._client.connect_usb()
-
-    def measure(self, *_, measurement_args=None,
-                measurement_name=None, monsoon_output_path=None,
-                **__):
-        """Blocking function that measures power through bits for the specified
-        duration. Results need to be consulted through other methods such as
-        get_metrics or post processing files like the ones
-        generated at monsoon_output_path after calling `release_resources`.
-
-        Args:
-            measurement_args: A dictionary with the following structure:
-                {
-                   'duration': <seconds to measure for>
-                   'hz': <samples per second>
-                   'measure_after_seconds': <sleep time before measurement>
-                }
-                The actual number of samples per second is limited by the
-                bits configuration. The value of hz is defaulted to 1000.
-            measurement_name: A name to give to the measurement (which is also
-                used as the Bits collection name. Bits collection names (and
-                therefore measurement names) need to be unique within the
-                context of a Bits object.
-            monsoon_output_path: If provided this path will be used to generate
-                a monsoon like formatted file at the release_resources step.
-        """
-        if measurement_args is None:
-            raise ValueError('measurement_args can not be left undefined')
-
-        duration = measurement_args.get('duration')
-        if duration is None:
-            raise ValueError(
-                'duration can not be left undefined within measurement_args')
-
-        hz = measurement_args.get('hz', 1000)
-
-        # Delay the start of the measurement if an offset is required
-        measure_after_seconds = measurement_args.get('measure_after_seconds')
-        if measure_after_seconds:
-            time.sleep(measure_after_seconds)
-
-        if self._active_collection:
-            raise BitsError(
-                'Attempted to start a collection while there is still an '
-                'active one. Active collection: %s',
-                self._active_collection.name)
-
-        self._collections_counter = self._collections_counter + 1
-        # The name gets a random 8 characters salt suffix because the Bits
-        # client has a bug where files with the same name are considered to be
-        # the same collection and it won't load two files with the same name.
-        # b/153170987 b/153944171
-        if not measurement_name:
-            measurement_name = 'bits_collection_%s_%s' % (
-                str(self._collections_counter), str(uuid.uuid4())[0:8])
-
-        self._active_collection = _BitsCollection(measurement_name,
-                                                  monsoon_output_path)
-        self._client.start_collection(self._active_collection.name,
-                                      default_sampling_rate=hz)
-        time.sleep(duration)
-
-    def get_metrics(self, *_, timestamps=None, **__):
-        """Gets metrics for the segments delimited by the timestamps dictionary.
-
-        Must be called before releasing resources, otherwise it will fail adding
-        markers to the collection.
-
-        Args:
-            timestamps: A dictionary of the shape:
-                {
-                    'segment_name': {
-                        'start' : <milliseconds_since_epoch> or <datetime>
-                        'end': <milliseconds_since_epoch> or <datetime>
-                    }
-                    'another_segment': {
-                        'start' : <milliseconds_since_epoch> or <datetime>
-                        'end': <milliseconds_since_epoch> or <datetime>
-                    }
-                }
-        Returns:
-            A dictionary of the shape:
-                {
-                    'segment_name': <list of power_metrics.Metric>
-                    'another_segment': <list of power_metrics.Metric>
-                }
-        """
-        if timestamps is None:
-            raise ValueError('timestamps dictionary can not be left undefined')
-
-        metrics = {}
-
-        for segment_name, times in timestamps.items():
-            if 'start' not in times or 'end' not in times:
-                continue
-
-            start = times['start']
-            end = times['end']
-
-            # bits accepts nanoseconds only, but since this interface needs to
-            # backwards compatible with monsoon which works with milliseconds we
-            # require to do a conversion from milliseconds to nanoseconds.
-            # The preferred way for new calls to this function should be using
-            # datetime instead which is unambiguous
-            if isinstance(start, (int, float)):
-                start = start * 1e6
-            if isinstance(end, (int, float)):
-                end = end * 1e6
-
-            raw_metrics = self._client.get_metrics(self._active_collection.name,
-                                                   start=start, end=end)
-            self._add_marker(start, 'start - %s' % segment_name)
-            self._add_marker(end, 'end - %s' % segment_name)
-            metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
-        return metrics
-
-    def _add_marker(self, timestamp, marker_text):
-        if not self._active_collection:
-            raise BitsError(
-                'markers can not be added without an active collection')
-        self._active_collection.add_marker(timestamp, marker_text)
-
-    def release_resources(self):
-        """Performs all the cleanup and export tasks.
-
-        In the way that Bits' is interfaced several tasks can not be performed
-        while a collection is still active (like exporting the data) and others
-        can only take place while the collection is still active (like adding
-        markers to a collection).
-
-        To workaround this unique workflow, the collections that are started
-        with the 'measure' method are not really stopped after the method
-        is unblocked, it is only stopped after this method is called.
-
-        All the export files (.7z.bits and monsoon-formatted file) are also
-        generated in this method.
-        """
-        if not self._active_collection:
-            raise BitsError(
-                'Attempted to stop a collection without starting one')
-        self._client.add_markers(self._active_collection.name,
-                                 self._active_collection.markers_buffer)
-        self._client.stop_collection(self._active_collection.name)
-
-        export_file = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.7z.bits' % self._active_collection.name)
-        self._client.export(self._active_collection.name, export_file)
-        if self._active_collection.monsoon_output_path:
-            self._attempt_monsoon_format()
-        self._active_collection = None
-
-    def _attempt_monsoon_format(self):
-        """Attempts to create a monsoon-formatted file.
-
-        In the case where there is not enough information to retrieve a
-        monsoon-like file, this function will do nothing.
-        """
-        available_channels = self._client.list_channels(
-            self._active_collection.name)
-        milli_amps_channel = None
-
-        for channel in available_channels:
-            if channel.endswith(self._root_rail):
-                milli_amps_channel = self._root_rail
-                break
-
-        if milli_amps_channel is None:
-            logging.debug('No monsoon equivalent channels were found when '
-                          'attempting to recreate monsoon file format. '
-                          'Available channels were: %s',
-                          str(available_channels))
-            return
-
-        logging.debug('Recreating monsoon file format from channel: %s',
-                      milli_amps_channel)
-        self._client.export_as_monsoon_format(
-            self._active_collection.monsoon_output_path,
-            self._active_collection.name,
-            milli_amps_channel)
-
-    def get_waveform(self, file_path=None):
-        """Parses a file generated in release_resources.
-
-        Args:
-            file_path: Path to a waveform file.
-
-        Returns:
-            A list of tuples in which the first element is a timestamp and the
-            second element is the sampled current at that time.
-        """
-        if file_path is None:
-            raise ValueError('file_path can not be None')
-
-        return list(power_metrics.import_raw_data(file_path))
-
-    def teardown(self):
-        if self._service is None:
-            return
-
-        if self._service.service_state == bits_service.BitsServiceStates.STARTED:
-            self._service.stop()
diff --git a/src/antlion/controllers/bits_lib/__init__.py b/src/antlion/controllers/bits_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/bits_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/bits_lib/bits_client.py b/src/antlion/controllers/bits_lib/bits_client.py
deleted file mode 100644
index c68aafd..0000000
--- a/src/antlion/controllers/bits_lib/bits_client.py
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import csv
-from datetime import datetime
-import logging
-import tempfile
-
-from antlion.libs.proc import job
-import yaml
-
-
-class BitsClientError(Exception):
-    pass
-
-
-# An arbitrary large number of seconds.
-ONE_YEAR = str(3600 * 24 * 365)
-EPOCH = datetime.utcfromtimestamp(0)
-
-
-def _to_ns(timestamp):
-    """Returns the numerical value of a timestamp in nanoseconds since epoch.
-
-    Args:
-        timestamp: Either a number or a datetime.
-
-    Returns:
-        Rounded timestamp if timestamp is numeric, number of nanoseconds since
-        epoch if timestamp is instance of datetime.datetime.
-    """
-    if isinstance(timestamp, datetime):
-        return int((timestamp - EPOCH).total_seconds() * 1e9)
-    elif isinstance(timestamp, (float, int)):
-        return int(timestamp)
-    raise ValueError('%s can not be converted to a numerical representation of '
-                     'nanoseconds.' % type(timestamp))
-
-
-class BitsClient(object):
-    """Helper class to issue bits' commands"""
-
-    def __init__(self, binary, service, service_config):
-        """Constructs a BitsClient.
-
-        Args:
-            binary: The location of the bits.par client binary.
-            service: A bits_service.BitsService object. The service is expected
-              to be previously setup.
-            service_config: The bits_service_config.BitsService object used to
-              start the service on service_port.
-        """
-        self._log = logging.getLogger()
-        self._binary = binary
-        self._service = service
-        self._server_config = service_config
-
-    def _acquire_monsoon(self):
-        """Gets hold of a Monsoon so no other processes can use it.
-        Only works if there is a monsoon."""
-        self._log.debug('acquiring monsoon')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'acquire_monsoon', timeout=10)
-
-    def _release_monsoon(self):
-        self._log.debug('releasing monsoon')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'release_monsoon', timeout=10)
-
-    def run_cmd(self, *args, timeout=60):
-        """Executes a generic bits.par command.
-
-        Args:
-            args: A bits.par command as a tokenized array. The path to the
-              binary and the service port are provided by default, cmd should
-              only contain the remaining tokens of the desired command.
-            timeout: Number of seconds to wait for the command to finish before
-              forcibly killing it.
-        """
-        result = job.run([self._binary, '--port',
-                          self._service.port] + [str(arg) for arg in args],
-                         timeout=timeout)
-        return result.stdout
-
-    def export(self, collection_name, path):
-        """Exports a collection to its bits persistent format.
-
-        Exported files can be shared and opened through the Bits UI.
-
-        Args:
-            collection_name: Collection to be exported.
-            path: Where the resulting file should be created. Bits requires that
-            the resulting file ends in .7z.bits.
-        """
-        if not path.endswith('.7z.bits'):
-            raise BitsClientError('Bits\' collections can only be exported to '
-                                  'files ending in .7z.bits, got %s' % path)
-        self._log.debug('exporting collection %s to %s',
-                        collection_name,
-                        path)
-        self.run_cmd('--name',
-                     collection_name,
-                     '--ignore_gaps',
-                     '--export',
-                     '--export_path',
-                     path,
-                     timeout=600)
-
-    def export_as_csv(self, channels, collection_name, output_file):
-        """Export bits data as CSV.
-
-        Writes the selected channel data to the given output_file. Note that
-        the first line of the file contains headers.
-
-        Args:
-          channels: A list of string pattern matches for the channel to be
-            retrieved. For example, ":mW" will export all power channels,
-            ":mV" will export all voltage channels, "C1_01__" will export
-            power/voltage/current for the first fail of connector 1.
-          collection_name: A string for a collection that is sampling.
-          output_file: A string file path where the CSV will be written.
-        """
-        channels_arg = ','.join(channels)
-        cmd = ['--csvfile',
-               output_file,
-               '--name',
-               collection_name,
-               '--ignore_gaps',
-               '--csv_rawtimestamps',
-               '--channels',
-               channels_arg]
-        if self._server_config.has_virtual_metrics_file:
-            cmd = cmd + ['--vm_file', 'default']
-        self._log.debug(
-            'exporting csv for collection %s to %s, with channels %s',
-            collection_name, output_file, channels_arg)
-        self.run_cmd(*cmd, timeout=600)
-
-    def add_markers(self, collection_name, markers):
-        """Appends markers to a collection.
-
-        These markers are displayed in the Bits UI and are useful to label
-        important test events.
-
-        Markers can only be added to collections that have not been
-        closed / stopped. Markers need to be added in chronological order,
-        this function ensures that at least the markers added in each
-        call are sorted in chronological order, but if this function
-        is called multiple times, then is up to the user to ensure that
-        the subsequent batches of markers are for timestamps higher (newer)
-        than all the markers passed in previous calls to this function.
-
-        Args:
-            collection_name: The name of the collection to add markers to.
-            markers: A list of tuples of the shape:
-
-             [(<nano_seconds_since_epoch or datetime>, <marker text>),
-              (<nano_seconds_since_epoch or datetime>, <marker text>),
-              (<nano_seconds_since_epoch or datetime>, <marker text>),
-              ...
-            ]
-        """
-        # sorts markers in chronological order before adding them. This is
-        # required by go/pixel-bits
-        for ts, marker in sorted(markers, key=lambda x: _to_ns(x[0])):
-            self._log.debug('Adding marker at %s: %s', str(ts), marker)
-            self.run_cmd('--name',
-                         collection_name,
-                         '--log_ts',
-                         str(_to_ns(ts)),
-                         '--log',
-                         marker,
-                         timeout=10)
-
-    def get_metrics(self, collection_name, start=None, end=None):
-        """Extracts metrics for a period of time.
-
-        Args:
-            collection_name: The name of the collection to get metrics from
-            start: Start of the period of interest, expressed as nanoseconds
-            since epoch or as a datetime. If not provided, the start of the
-            collection is used.
-            end: End of the period of interest, expressed as nanoseconds
-            since epoch or as a datetime. If not provided, the end of the
-            collection is used.
-        """
-        with tempfile.NamedTemporaryFile(prefix='bits_metrics') as tf:
-            cmd = ['--name',
-                   collection_name,
-                   '--ignore_gaps',
-                   '--aggregates_yaml_path',
-                   tf.name]
-
-            if start is not None:
-                cmd = cmd + ['--abs_start_time', str(_to_ns(start))]
-            if end is not None:
-                cmd = cmd + ['--abs_stop_time', str(_to_ns(end))]
-            if self._server_config.has_virtual_metrics_file:
-                cmd = cmd + ['--vm_file', 'default']
-
-            self.run_cmd(*cmd)
-            with open(tf.name) as mf:
-                self._log.debug(
-                    'bits aggregates for collection %s [%s-%s]: %s' % (
-                        collection_name, start, end,
-                        mf.read()))
-
-            with open(tf.name) as mf:
-                return yaml.safe_load(mf)
-
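# Illustrative only: a hedged sketch of reading aggregates for a sub-window of
# a collection, assuming `client` is an instance of the bits client class shown
# in this diff. Both nanoseconds since epoch and datetime objects are accepted
# for start/end.
import datetime

window_end = datetime.datetime.now()
window_start = window_end - datetime.timedelta(seconds=30)
metrics = client.get_metrics('my_collection', start=window_start,
                             end=window_end)
# The parsed YAML exposes a 'data' list with one entry per channel.
for channel in metrics['data']:
    print(channel['name'])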
-    def disconnect_usb(self):
-        """Disconnects the monsoon's usb. Only works if there is a monsoon"""
-        self._log.debug('disconnecting monsoon\'s usb')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'usb_disconnect', timeout=10)
-
-    def start_collection(self, collection_name, default_sampling_rate=1000):
-        """Indicates Bits to start a collection.
-
-        Args:
-            collection_name: Name to give to the collection to be started.
-            Collection names must be unique at the Bits service level. If
-            multiple collections are taken within the context of the same
-            Bits service, ensure that each collection is given a different
-            name.
-            default_sampling_rate: Samples per second to be collected.
-        """
-
-        cmd = ['--name',
-               collection_name,
-               '--non_blocking',
-               '--time',
-               ONE_YEAR,
-               '--default_sampling_rate',
-               str(default_sampling_rate)]
-
-        if self._server_config.has_kibbles:
-            cmd = cmd + ['--disk_space_saver']
-
-        self._log.debug('starting collection %s', collection_name)
-        self.run_cmd(*cmd, timeout=10)
-
-    def connect_usb(self):
-        """Connects the monsoon's usb. Only works if there is a monsoon."""
-        cmd = ['--collector',
-               'Monsoon',
-               '--collector_cmd',
-               'usb_connect']
-        self._log.debug('connecting monsoon\'s usb')
-        self.run_cmd(*cmd, timeout=10)
-
-    def stop_collection(self, collection_name):
-        """Stops the active collection."""
-        self._log.debug('stopping collection %s', collection_name)
-        self.run_cmd('--name',
-                     collection_name,
-                     '--stop')
-        self._log.debug('stopped collection %s', collection_name)
-
-    def list_devices(self):
-        """Lists devices managed by the bits_server this client is connected
-        to.
-
-        Returns:
-            bits' output when called with --list devices.
-        """
-        self._log.debug('listing devices')
-        result = self.run_cmd('--list', 'devices', timeout=20)
-        return result
-
-    def list_channels(self, collection_name):
-        """Finds all the available channels in a given collection.
-
-        Args:
-            collection_name: The name of the collection to get channels from.
-        """
-        metrics = self.get_metrics(collection_name)
-        return [channel['name'] for channel in metrics['data']]
-
-    def export_as_monsoon_format(self, dest_path, collection_name,
-                                 channel_pattern):
-        """Exports data from a collection in monsoon style.
-
-        This function exists because there are tools that have been built on
-        top of the monsoon format. To be able to leverage such tools we need
-        to make the data compliant with the format.
-
-        The monsoon format is:
-
-        <time_since_epoch_in_secs> <amps>
-
-        Args:
-            dest_path: Path where the resulting file will be generated.
-            collection_name: The name of the Bits' collection to export data
-            from.
-            channel_pattern: A regex that matches the Bits' channel to be used
-            as the source of data. If there are multiple matching channels,
-            only the first one will be used. The channel is always assumed to
-            be expressed in milli-amps; the resulting format requires amps, so
-            the values coming from the first matching channel will always be
-            divided by 1000.
-        """
-        with tempfile.NamedTemporaryFile(prefix='bits_csv_') as tmon:
-            self.export_as_csv([channel_pattern], collection_name, tmon.name)
-
-            self._log.debug(
-                'massaging bits csv to monsoon format for collection'
-                ' %s', collection_name)
-            with open(tmon.name) as csv_file:
-                reader = csv.reader(csv_file)
-                headers = next(reader)
-                self._log.debug('csv headers %s', headers)
-                with open(dest_path, 'w') as dest:
-                    for row in reader:
-                        ts = float(row[0]) / 1e9
-                        amps = float(row[1]) / 1e3
-                        dest.write('%.7f %.12f\n' % (ts, amps))
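# Illustrative only: a rough sketch of the export helpers above, assuming
# `client` is an instance of the bits client class and that a collection named
# 'power_run' has already been stopped. Paths and the channel pattern are
# placeholders.
client.export_as_csv([':mW', ':mV'], 'power_run', '/tmp/power_run.csv')
# Monsoon-style output ("<time_since_epoch_in_secs> <amps>" per line), built
# from the first channel matching the pattern; values are read as milli-amps
# and divided by 1000.
client.export_as_monsoon_format('/tmp/power_run.monsoon', 'power_run',
                                'C1_01_mA')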
diff --git a/src/antlion/controllers/bits_lib/bits_service.py b/src/antlion/controllers/bits_lib/bits_service.py
deleted file mode 100644
index ad2f660..0000000
--- a/src/antlion/controllers/bits_lib/bits_service.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import atexit
-import json
-import logging
-import os
-import re
-import signal
-import tempfile
-import time
-
-from enum import Enum
-
-from antlion import context
-from antlion.libs.proc import job
-from antlion.libs.proc import process
-
-
-class BitsServiceError(Exception):
-    pass
-
-
-class BitsServiceStates(Enum):
-    NOT_STARTED = 'not-started'
-    STARTED = 'started'
-    STOPPED = 'stopped'
-
-
-class BitsService(object):
-    """Helper class to start and stop a bits service
-
-    Attributes:
-        port: Once the service starts, the port it was assigned to is made
-        available so that external agents can reach the background service.
-        config: The BitsServiceConfig used to configure this service.
-        name: A free form string.
-        service_state: A BitsServiceState that represents the service state.
-    """
-
-    def __init__(self, config, binary, output_log_path,
-                 name='bits_service_default',
-                 timeout=None):
-        """Creates a BitsService object.
-
-        Args:
-            config: A BitsServiceConfig, as described in
-            go/pixel-bits/user-guide/service/configuration.md
-            binary: Path to a bits_service binary.
-            output_log_path: Full path to where the resulting logs should be
-            stored.
-            name: Optional string to identify this service by. This
-            is used as reference in logs to tell this service apart from others
-            running in parallel.
-            timeout: Maximum time in seconds the service should be allowed
-            to run in the background after start. If left undefined the service
-            in the background will not time out.
-        """
-        self.name = name
-        self.port = None
-        self.config = config
-        self.service_state = BitsServiceStates.NOT_STARTED
-        self._timeout = timeout
-        self._binary = binary
-        self._log = logging.getLogger()
-        self._process = None
-        self._output_log = open(output_log_path, 'w')
-        self._collections_dir = tempfile.TemporaryDirectory(
-            prefix='bits_service_collections_dir_')
-        self._cleaned_up = False
-        atexit.register(self._atexit_cleanup)
-
-    def _atexit_cleanup(self):
-        if not self._cleaned_up:
-            self._log.error('Cleaning up bits_service %s at exit.', self.name)
-            self._cleanup()
-
-    def _write_extra_debug_logs(self):
-        dmesg_log = '%s.dmesg.txt' % self._output_log.name
-        dmesg = job.run(['dmesg', '-e'], ignore_status=True)
-        with open(dmesg_log, 'w') as f:
-            f.write(dmesg.stdout)
-
-        free_log = '%s.free.txt' % self._output_log.name
-        free = job.run(['free', '-m'], ignore_status=True)
-        with open(free_log, 'w') as f:
-            f.write(free.stdout)
-
-        df_log = '%s.df.txt' % self._output_log.name
-        df = job.run(['df', '-h'], ignore_status=True)
-        with open(df_log, 'w') as f:
-            f.write(df.stdout)
-
-    def _cleanup(self):
-        self._write_extra_debug_logs()
-        self.port = None
-        self._collections_dir.cleanup()
-        if self._process and self._process.is_running():
-            self._process.signal(signal.SIGINT)
-            self._log.debug('SIGINT sent to bits_service %s.' % self.name)
-            self._process.wait(kill_timeout=60.0)
-            self._log.debug('bits_service %s has been stopped.' % self.name)
-        self._output_log.close()
-        if self.config.has_monsoon:
-            job.run([self.config.monsoon_config.monsoon_binary,
-                     '--serialno',
-                     str(self.config.monsoon_config.serial_num),
-                     '--usbpassthrough',
-                     'on'],
-                    timeout=10)
-        self._cleaned_up = True
-
-    def _service_started_listener(self, line):
-        if self.service_state is BitsServiceStates.STARTED:
-            return
-        if 'Started server!' in line and self.port is not None:
-            self.service_state = BitsServiceStates.STARTED
-
-    PORT_PATTERN = re.compile(r'.*Server listening on .*:(\d+)\.$')
-
-    def _service_port_listener(self, line):
-        if self.port is not None:
-            return
-        match = self.PORT_PATTERN.match(line)
-        if match:
-            self.port = match.group(1)
-
-    def _output_callback(self, line):
-        self._output_log.write(line)
-        self._output_log.write('\n')
-        self._service_port_listener(line)
-        self._service_started_listener(line)
-
-    def _trigger_background_process(self, binary):
-        config_path = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.config.json' % self.name)
-        with open(config_path, 'w') as f:
-            f.write(json.dumps(self.config.config_dic, indent=2))
-
-        cmd = [binary,
-               '--port',
-               '0',
-               '--collections_folder',
-               self._collections_dir.name,
-               '--collector_config_file',
-               config_path]
-
-        # bits_service only works on linux systems, therefore it is safe to
-        # assume that 'timeout' will be available.
-        if self._timeout:
-            cmd = ['timeout',
-                   '--signal=SIGTERM',
-                   '--kill-after=60',
-                   str(self._timeout)] + cmd
-
-        self._process = process.Process(cmd)
-        self._process.set_on_output_callback(self._output_callback)
-        self._process.set_on_terminate_callback(self._on_terminate)
-        self._process.start()
-
-    def _on_terminate(self, *_):
-        self._log.error('bits_service %s stopped unexpectedly.', self.name)
-        self._cleanup()
-
-    def start(self):
-        """Starts the bits service in the background.
-
-        This function blocks until the background service signals that it has
-        successfully started. A BitsServiceError is raised if the signal is not
-        received.
-        """
-        if self.service_state is BitsServiceStates.STOPPED:
-            raise BitsServiceError(
-                'bits_service %s was already stopped. A stopped'
-                ' service can not be started again.' % self.name)
-
-        if self.service_state is BitsServiceStates.STARTED:
-            raise BitsServiceError(
-                'bits_service %s has already been started.' % self.name)
-
-        self._log.info('starting bits_service %s', self.name)
-        self._trigger_background_process(self._binary)
-
-        # wait 40 seconds for the service to be ready.
-        max_startup_wait = time.time() + 40
-        while time.time() < max_startup_wait:
-            if self.service_state is BitsServiceStates.STARTED:
-                self._log.info('bits_service %s started on port %s', self.name,
-                               self.port)
-                return
-            time.sleep(0.1)
-
-        self._log.error('bits_service %s did not start on time, starting '
-                        'service teardown and raising a BitsServiceError.',
-                        self.name)
-        self._cleanup()
-        raise BitsServiceError(
-            'bits_service %s did not start successfully' % self.name)
-
-    def stop(self):
-        """Stops the bits service."""
-        if self.service_state is BitsServiceStates.STOPPED:
-            raise BitsServiceError(
-                'bits_service %s has already been stopped.' % self.name)
-        port = self.port
-        self._log.info('stopping bits_service %s on port %s', self.name, port)
-        self.service_state = BitsServiceStates.STOPPED
-        self._cleanup()
-        self._log.info('bits_service %s on port %s was stopped', self.name,
-                       port)
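# Illustrative only: a minimal lifecycle sketch for the BitsService class
# deleted above. The config object and binary path are placeholders; in
# practice the config would come from a BitsServiceConfig and the binary from
# the bits controller config.
service = BitsService(
    config=service_config,            # a BitsServiceConfig (placeholder)
    binary='/path/to/bits_service',   # placeholder path
    output_log_path='/tmp/bits_service.log',
    name='bits_service_example',
    timeout=3600)
service.start()      # blocks until 'Started server!' is seen or ~40s elapse
print(service.port)  # port parsed from the service's own output
service.stop()       # a stopped service can not be started again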
diff --git a/src/antlion/controllers/bits_lib/bits_service_config.py b/src/antlion/controllers/bits_lib/bits_service_config.py
deleted file mode 100644
index 1900869..0000000
--- a/src/antlion/controllers/bits_lib/bits_service_config.py
+++ /dev/null
@@ -1,249 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-DEFAULT_MONSOON_CONFIG_DICT = {
-    'enabled': 1,
-    'type': 'monsooncollector',
-    'monsoon_reset': 0,
-    # maximum monsoon sample rate that works best for both lvpm and hvpm
-    'sampling_rate': 1000,
-}
-
-
-class _BitsMonsoonConfig(object):
-    """Helper object to construct a bits_service config from a monsoon config as
-    defined for the bits controller config and required additional resources,
-    such as paths to executables.
-
-    The format for the bits_service's monsoon configuration is explained at:
-    http://go/pixel-bits/user-guide/service/collectors/monsoon
-
-    Attributes:
-        config_dic: A bits_service's monsoon configuration as a python
-        dictionary.
-    """
-
-    def __init__(self, monsoon_config, lvpm_monsoon_bin=None,
-                 hvpm_monsoon_bin=None):
-        """Constructs _BitsServiceMonsoonConfig.
-
-        Args:
-            monsoon_config: The monsoon config as defined in the
-                ACTS Bits controller config. Expected format is:
-                  { 'serial_num': <serial number:int>,
-                    'monsoon_voltage': <voltage:double> }
-            lvpm_monsoon_bin: Binary file to interact with low voltage monsoons.
-                Needed if the monsoon is an lvpm monsoon (serial number lower
-                than 20000).
-            hvpm_monsoon_bin: Binary file to interact with high voltage
-                monsoons. Needed if the monsoon is an hvpm monsoon (serial
-                number 20000 or greater).
-        """
-        if 'serial_num' not in monsoon_config:
-            raise ValueError(
-                'Monsoon serial_num can not be undefined. Received '
-                'config was: %s' % monsoon_config)
-        if 'monsoon_voltage' not in monsoon_config:
-            raise ValueError('Monsoon voltage can not be undefined. Received '
-                             'config was: %s' % monsoon_config)
-
-        self.serial_num = int(monsoon_config['serial_num'])
-        self.monsoon_voltage = float(monsoon_config['monsoon_voltage'])
-
-        self.config_dic = copy.deepcopy(DEFAULT_MONSOON_CONFIG_DICT)
-        if float(self.serial_num) >= 20000:
-            self.config_dic['hv_monsoon'] = 1
-            if hvpm_monsoon_bin is None:
-                raise ValueError('hvpm_monsoon binary is needed but was None. '
-                                 'Received config was: %s' % monsoon_config)
-            self.monsoon_binary = hvpm_monsoon_bin
-        else:
-            self.config_dic['hv_monsoon'] = 0
-            if lvpm_monsoon_bin is None:
-                raise ValueError('lvpm_monsoon binary is needed but was None. '
-                                 'Received config was: %s' % monsoon_config)
-            self.monsoon_binary = lvpm_monsoon_bin
-
-        self.config_dic['monsoon_binary_path'] = self.monsoon_binary
-        self.config_dic['monsoon_voltage'] = self.monsoon_voltage
-        self.config_dic['serial_num'] = self.serial_num
-
-
-DEFAULT_KIBBLES_BOARD_CONFIG = {
-    'enabled': 1,
-    'type': 'kibblecollector',
-    'attached_kibbles': {}
-}
-
-DEFAULT_KIBBLE_CONFIG = {
-    'ultra_channels_current_hz': 976.5625,
-    'ultra_channels_voltage_hz': 976.5625,
-    'high_channels_current_hz': 976.5625,
-    'high_channels_voltage_hz': 976.5625
-}
-
-
-class _BitsKibblesConfig(object):
-    def __init__(self, kibbles_config, kibble_bin, kibble_board_file):
-        """Constructs _BitsKibblesConfig.
-
-        Args:
-            kibbles_config: A list of compacted kibble boards descriptions.
-                Expected format is:
-                    [{
-                        'board': 'BoardName1',
-                        'connector': 'A',
-                        'serial': 'serial_1'
-                     },
-                    {
-                        'board': 'BoardName2',
-                        'connector': 'D',
-                        'serial': 'serial_2'
-                    }]
-                More details can be found at go/acts-bits.
-            kibble_bin: Binary file to interact with kibbles.
-            kibble_board_file: File describing the distribution of rails on a
-                kibble. go/kibble#setting-up-bits-board-files
-        """
-
-        if not isinstance(kibbles_config, list):
-            raise ValueError(
-                'kibbles_config must be a list. Got %s.' % kibbles_config)
-
-        if kibble_bin is None:
-            raise ValueError('Kibbles were present in the config but no '
-                             'kibble_bin was provided')
-        if kibble_board_file is None:
-            raise ValueError('Kibbles were present in the config but no '
-                             'kibble_board_file was provided')
-
-        self.boards_configs = {}
-
-        for kibble in kibbles_config:
-            if 'board' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'board')
-            if 'connector' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'connector')
-            if 'serial' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'serial')
-
-            board = kibble['board']
-            connector = kibble['connector']
-            serial = kibble['serial']
-            if board not in self.boards_configs:
-                self.boards_configs[board] = copy.deepcopy(
-                    DEFAULT_KIBBLES_BOARD_CONFIG)
-                self.boards_configs[board][
-                    'board_file'] = kibble_board_file
-                self.boards_configs[board]['kibble_py'] = kibble_bin
-            kibble_config = copy.deepcopy(DEFAULT_KIBBLE_CONFIG)
-            kibble_config['connector'] = connector
-            self.boards_configs[board]['attached_kibbles'][
-                serial] = kibble_config
-
-
-DEFAULT_SERVICE_CONFIG_DICT = {
-    'devices': {
-        'default_device': {
-            'enabled': 1,
-            'collectors': {}
-        }
-    }
-}
-
-
-class BitsServiceConfig(object):
-    """Helper object to construct a bits_service config from a bits controller
-    config and required additional resources, such as paths to executables.
-
-    The format for bits_service's configuration is explained in:
-    go/pixel-bits/user-guide/service/configuration.md
-
-    Attributes:
-        config_dic: A bits_service configuration as a python dictionary.
-    """
-
-    def __init__(self, controller_config, lvpm_monsoon_bin=None,
-                 hvpm_monsoon_bin=None, kibble_bin=None,
-                 kibble_board_file=None, virtual_metrics_file=None):
-        """Creates a BitsServiceConfig.
-
-        Args:
-            controller_config: The config as defined in the ACTS Bits
-                controller config. Expected format is:
-                {
-                    // optional
-                    'Monsoon':   {
-                        'serial_num': <serial number:int>,
-                        'monsoon_voltage': <voltage:double>
-                    }
-                    // optional
-                    'Kibble': [
-                        {
-                            'board': 'BoardName1',
-                            'connector': 'A',
-                            'serial': 'serial_1'
-                        },
-                        {
-                            'board': 'BoardName2',
-                            'connector': 'D',
-                            'serial': 'serial_2'
-                        }
-                    ]
-                }
-            lvpm_monsoon_bin: Binary file to interact with low voltage monsoons.
-                Needed if the monsoon is an lvpm monsoon (serial number lower
-                than 20000).
-            hvpm_monsoon_bin: Binary file to interact with high voltage
-                monsoons. Needed if the monsoon is an hvpm monsoon (serial
-                number 20000 or greater).
-            kibble_bin: Binary file to interact with kibbles.
-            kibble_board_file: File describing the distribution of rails on a
-                kibble. go/kibble#setting-up-bits-board-files
-            virtual_metrics_file: A virtual metrics file used to add data
-                aggregates on top of regular channel aggregates.
-                go/pixel-bits/user-guide/virtual-metrics
-        """
-        self.config_dic = copy.deepcopy(DEFAULT_SERVICE_CONFIG_DICT)
-        self.has_monsoon = False
-        self.has_kibbles = False
-        self.has_virtual_metrics_file = False
-        self.monsoon_config = None
-        self.kibbles_config = None
-        if 'Monsoon' in controller_config:
-            self.has_monsoon = True
-            self.monsoon_config = _BitsMonsoonConfig(
-                controller_config['Monsoon'],
-                lvpm_monsoon_bin,
-                hvpm_monsoon_bin)
-            self.config_dic['devices']['default_device']['collectors'][
-                'Monsoon'] = self.monsoon_config.config_dic
-        if 'Kibbles' in controller_config:
-            self.has_kibbles = True
-            self.kibbles_config = _BitsKibblesConfig(
-                controller_config['Kibbles'],
-                kibble_bin, kibble_board_file)
-            self.config_dic['devices']['default_device']['collectors'].update(
-                self.kibbles_config.boards_configs)
-            if virtual_metrics_file is not None:
-                self.config_dic['devices']['default_device'][
-                    'vm_files'] = [virtual_metrics_file]
-                self.has_virtual_metrics_file = True
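# Illustrative only: a sketch of building a BitsServiceConfig from a bits
# controller config with both a Monsoon and Kibbles, following the formats
# documented above. Serial numbers, paths, and board names are placeholders.
controller_config = {
    'Monsoon': {'serial_num': 19001, 'monsoon_voltage': 4.2},
    'Kibbles': [
        {'board': 'BoardName1', 'connector': 'A', 'serial': 'serial_1'},
    ],
}
service_config = BitsServiceConfig(
    controller_config,
    lvpm_monsoon_bin='/path/to/lvpm_monsoon',  # serial < 20000, so lvpm
    kibble_bin='/path/to/kibble_bin',          # placeholder
    kibble_board_file='/path/to/board_file',   # placeholder
    virtual_metrics_file='/path/to/vm_file')   # optional
assert service_config.has_monsoon and service_config.has_kibbles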
diff --git a/src/antlion/controllers/bluetooth_pts_device.py b/src/antlion/controllers/bluetooth_pts_device.py
deleted file mode 100644
index 1dbe5c7..0000000
--- a/src/antlion/controllers/bluetooth_pts_device.py
+++ /dev/null
@@ -1,764 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Prerequisites:
-    Windows 10
-    Bluetooth PTS installed
-    Recommended: Running cmder as Admin: https://cmder.net/
-
-### BEGIN SETUP STEPS###
-1. Install latest version of Python for windows:
-    https://www.python.org/downloads/windows/
-
-Tested successfully on Python 3.7.3.:
-    https://www.python.org/ftp/python/3.7.3/python-3.7.3.exe
-
-2. Launch Powershell and setup PATH:
-Setx PATH “%PATH%;C:/Users/<username>/AppData/Local/Programs/Python/Python37-32/Scripts”
-
-3. Launch Cmder as Admin before running any PTS related ACTS tests.
-
-
-### END SETUP STEPS###
-
-
-Bluetooth PTS controller.
-Mandatory parameters are log_directory and sig_root_directory.
-
-ACTS Config setup:
-"BluetoothPtsDevice": {
-    "log_directory": "C:\\Users\\fsbtt\\Documents\\Profile Tuning Suite\\Test_Dir",
-    "sig_root_directory": "C:\\Program Files (x86)\\Bluetooth SIG"
-}
-
-"""
-from antlion import signals
-from datetime import datetime
-
-import ctypes
-import logging
-import os
-import subprocess
-import time
-import xml.etree.ElementTree as ET
-
-from xml.dom import minidom
-from xml.etree.ElementTree import Element
-
-
-class BluetoothPtsDeviceConfigError(signals.ControllerError):
-    pass
-
-
-class BluetoothPtsSnifferError(signals.ControllerError):
-    pass
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = "BluetoothPtsDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "bluetooth_pts_device"
-
-# Prefix to identify final verdict string. This is a PTS specific log String.
-VERDICT = 'VERDICT/'
-
-# Verdict strings that are specific to PTS.
-VERDICT_STRINGS = {
-    'RESULT_PASS': 'PASS',
-    'RESULT_FAIL': 'FAIL',
-    'RESULT_INCONC': 'INCONC',
-    'RESULT_INCOMP':
-    'INCOMP',  # Initial final verdict meaning that test has not completed yet.
-    'RESULT_NONE':
-    'NONE',  # Error verdict usually indicating internal PTS error.
-}
-
-# Sniffer ready log message.
-SNIFFER_READY = 'SNIFFER/Save and clear complete'
-
-# PTS Log Types as defined by PTS:
-LOG_TYPE_GENERAL_TEXT = 0
-LOG_TYPE_FIRST = 1
-LOG_TYPE_START_TEST_CASE = 1
-LOG_TYPE_TEST_CASE_ENDED = 2
-LOG_TYPE_START_DEFAULT = 3
-LOG_TYPE_DEFAULT_ENDED = 4
-LOG_TYPE_FINAL_VERDICT = 5
-LOG_TYPE_PRELIMINARY_VERDICT = 6
-LOG_TYPE_TIMEOUT = 7
-LOG_TYPE_ASSIGNMENT = 8
-LOG_TYPE_START_TIMER = 9
-LOG_TYPE_STOP_TIMER = 10
-LOG_TYPE_CANCEL_TIMER = 11
-LOG_TYPE_READ_TIMER = 12
-LOG_TYPE_ATTACH = 13
-LOG_TYPE_IMPLICIT_SEND = 14
-LOG_TYPE_GOTO = 15
-LOG_TYPE_TIMED_OUT_TIMER = 16
-LOG_TYPE_ERROR = 17
-LOG_TYPE_CREATE = 18
-LOG_TYPE_DONE = 19
-LOG_TYPE_ACTIVATE = 20
-LOG_TYPE_MESSAGE = 21
-LOG_TYPE_LINE_MATCHED = 22
-LOG_TYPE_LINE_NOT_MATCHED = 23
-LOG_TYPE_SEND_EVENT = 24
-LOG_TYPE_RECEIVE_EVENT = 25
-LOG_TYPE_OTHERWISE_EVENT = 26
-LOG_TYPE_RECEIVED_ON_PCO = 27
-LOG_TYPE_MATCH_FAILED = 28
-LOG_TYPE_COORDINATION_MESSAGE = 29
-
-PTS_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-
-
-def create(config):
-    if not config:
-        raise BluetoothPtsDeviceConfigError(PTS_DEVICE_EMPTY_CONFIG_MSG)
-    return get_instance(config)
-
-
-def destroy(pts):
-    try:
-        pts[0].clean_up()
-    except Exception:
-        pts[0].log.error("Failed to clean up properly.")
-
-
-def get_info(pts_devices):
-    """Get information from the BluetoothPtsDevice object.
-
-    Args:
-        pts_devices: A list of BluetoothPtsDevice objects although only one
-        will ever be specified.
-
-    Returns:
-        A dict, representing info for BluetoothPtsDevice object.
-    """
-    return {
-        "address": pts_devices[0].address,
-        "sniffer_ready": pts_devices[0].sniffer_ready,
-        "ets_manager_library": pts_devices[0].ets_manager_library,
-        "log_directory": pts_devices[0].log_directory,
-        "pts_installation_directory":
-        pts_devices[0].pts_installation_directory,
-    }
-
-
-def get_instance(config):
-    """Create BluetoothPtsDevice instance from a dictionary containing
-    information related to PTS. Namely the SIG root directory as
-    sig_root_directory and the log directory represented by the log_directory.
-
-    Args:
-        config: A dict that contains BluetoothPtsDevice device info.
-
-    Returns:
-        A list of BluetoothPtsDevice objects.
-    """
-    result = []
-    try:
-        log_directory = config.pop("log_directory")
-    except KeyError:
-        raise BluetoothPtsDeviceConfigError(
-            "Missing mandatory log_directory in config.")
-    try:
-        sig_root_directory = config.pop("sig_root_directory")
-    except KeyError:
-        example_path = \
-            "C:\\\\Program Files (x86)\\\\Bluetooth SIG"
-        raise BluetoothPtsDeviceConfigError(
-            "Missing mandatory sig_root_directory in config. Example path: {}".
-            format(example_path))
-
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth PTS\\bin\\ETSManager.dll"
-    ets_manager_library = "{}\\Bluetooth PTS\\bin\\ETSManager.dll".format(
-        sig_root_directory)
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth PTS\\bin"
-    pts_installation_directory = "{}\\Bluetooth PTS\\bin".format(
-        sig_root_directory)
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth Protocol Viewer"
-    pts_sniffer_directory = "{}\\Bluetooth Protocol Viewer".format(
-        sig_root_directory)
-    result.append(
-        BluetoothPtsDevice(ets_manager_library, log_directory,
-                           pts_installation_directory, pts_sniffer_directory))
-    return result
-
-
-class BluetoothPtsDevice:
-    """Class representing an Bluetooth PTS device and associated functions.
-
-    Each object of this class represents one BluetoothPtsDevice in ACTS.
-    """
-
-    _next_action = -1
-    _observers = []
-    address = ""
-    current_implicit_send_description = ""
-    devices = []
-    extra_answers = []
-    log_directory = ""
-    log = None
-    ics = None
-    ixit = None
-    profile_under_test = None
-    pts_library = None
-    pts_profile_mmi_request = ""
-    pts_test_result = VERDICT_STRINGS['RESULT_INCOMP']
-    sniffer_ready = False
-    test_log_directory = ""
-    test_log_prefix = ""
-
-    def __init__(self, ets_manager_library, log_directory,
-                 pts_installation_directory, pts_sniffer_directory):
-        self.log = logging.getLogger()
-        if ets_manager_library is not None:
-            self.ets_manager_library = ets_manager_library
-        self.log_directory = log_directory
-        if pts_installation_directory is not None:
-            self.pts_installation_directory = pts_installation_directory
-        if pts_sniffer_directory is not None:
-            self.pts_sniffer_directory = pts_sniffer_directory
-        # Define callback functions
-        self.USEAUTOIMPLSENDFUNC = ctypes.CFUNCTYPE(ctypes.c_bool)
-        self.use_auto_impl_send_func = self.USEAUTOIMPLSENDFUNC(
-            self.UseAutoImplicitSend)
-
-        self.DONGLE_MSG_FUNC = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_char_p)
-        self.dongle_msg_func = self.DONGLE_MSG_FUNC(self.DongleMsg)
-
-        self.DEVICE_SEARCH_MSG_FUNC = ctypes.CFUNCTYPE(ctypes.c_bool,
-                                                       ctypes.c_char_p,
-                                                       ctypes.c_char_p,
-                                                       ctypes.c_char_p)
-        self.dev_search_msg_func = self.DEVICE_SEARCH_MSG_FUNC(
-            self.DeviceSearchMsg)
-
-        self.LOGFUNC = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_char_p,
-                                        ctypes.c_char_p, ctypes.c_char_p,
-                                        ctypes.c_int, ctypes.c_void_p)
-        self.log_func = self.LOGFUNC(self.Log)
-
-        self.ONIMPLSENDFUNC = ctypes.CFUNCTYPE(ctypes.c_char_p,
-                                               ctypes.c_char_p, ctypes.c_int)
-        self.onimplsend_func = self.ONIMPLSENDFUNC(self.ImplicitSend)
-
-        # Helps with PTS reliability.
-        os.chdir(self.pts_installation_directory)
-        # Load EtsManager
-        self.pts_library = ctypes.cdll.LoadLibrary(self.ets_manager_library)
-        self.log.info("ETS Manager library {0:s} has been loaded".format(
-            self.ets_manager_library))
-        # If post-logging is turned on all callbacks to LPLOG-type function
-        # will be executed after test execution is complete. It is recommended
-        # that post-logging is turned on to avoid simultaneous invocations of
-        # LPLOG and LPAUTOIMPLICITSEND callbacks.
-        self.pts_library.SetPostLoggingEx(True)
-
-        self.xml_root = Element("ARCHIVE")
-        version = Element("VERSION")
-        version.text = "2.0"
-        self.xml_root.append(version)
-        self.xml_pts_pixit = Element("PicsPixit")
-        self.xml_pts_pixit.text = ""
-        self.xml_pts_running_log = Element("LOG")
-        self.xml_pts_running_log.text = ""
-        self.xml_pts_running_summary = Element("SUMMARY")
-        self.xml_pts_running_summary.text = ""
-
-    def clean_up(self):
-        # Since we have no insight to the actual PTS library,
-        # catch all Exceptions and log them.
-        try:
-            self.log.info("Cleaning up Stack...")
-            self.pts_library.ExitStackEx(self.profile_under_test)
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        try:
-            self.log.info("Unregistering Profile...")
-            self.pts_library.UnregisterProfileEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.UnregisterProfileEx(
-                self.profile_under_test.encode())
-            self.pts_library.UnRegisterGetDevInfoEx()
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        try:
-            self.log.info("Cleaning up Sniffer")
-            self.pts_library.SnifferTerminateEx()
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        self.log.info("Cleanup Done.")
-
-    def write_xml_pts_pixit_values_for_current_test(self):
-        """ Writes the current PICS and IXIT values to the XML result.
-        """
-        self.xml_pts_pixit.text = "ICS VALUES:\n\n"
-        for key, value in self.ics.items():
-            self.xml_pts_pixit.text += "{} {}\n".format(
-                key.decode(), value.decode())
-        self.xml_pts_pixit.text += "\nIXIT VALUES:\n\n"
-        for key, (_, value) in self.ixit.items():
-            self.xml_pts_pixit.text += "{} {}\n".format(
-                key.decode(), value.decode())
-
-    def set_ics_and_ixit(self, ics, ixit):
-        self.ics = ics
-        self.ixit = ixit
-
-    def set_profile_under_test(self, profile):
-        self.profile_under_test = profile
-
-    def setup_pts(self):
-        """Prepares PTS to run tests. This needs to be called in test classes
-        after ICS, IXIT, and setting Profile under test.
-        Specifically BluetoothPtsDevice functions:
-            set_profile_under_test
-            set_ics_and_ixit
-        """
-
-        # Register layer to test with callbacks
-        self.pts_library.RegisterProfileWithCallbacks.argtypes = [
-            ctypes.c_char_p, self.USEAUTOIMPLSENDFUNC, self.ONIMPLSENDFUNC,
-            self.LOGFUNC, self.DEVICE_SEARCH_MSG_FUNC, self.DONGLE_MSG_FUNC
-        ]
-        res = self.pts_library.RegisterProfileWithCallbacks(
-            self.profile_under_test.encode(), self.use_auto_impl_send_func,
-            self.onimplsend_func, self.log_func, self.dev_search_msg_func,
-            self.dongle_msg_func)
-
-        self.log.info(
-            "Profile has been registered with result {0:d}".format(res))
-
-        # GetDeviceInfo module is for discovering devices and PTS Dongle address
-        # Initialize GetDeviceInfo and register it with callbacks
-        # First parameter is PTS executable directory
-        self.pts_library.InitGetDevInfoWithCallbacks.argtypes = [
-            ctypes.c_char_p, self.DEVICE_SEARCH_MSG_FUNC, self.DONGLE_MSG_FUNC
-        ]
-        res = self.pts_library.InitGetDevInfoWithCallbacks(
-            self.pts_installation_directory.encode(), self.dev_search_msg_func,
-            self.dongle_msg_func)
-        self.log.info(
-            "GetDevInfo has been initialized with result {0:d}".format(res))
-        # Initialize PTS dongle
-        res = self.pts_library.VerifyDongleEx()
-        self.log.info(
-            "PTS dongle has been initialized with result {0:d}".format(res))
-
-        # Find PTS dongle address
-        self.pts_library.GetDongleBDAddress.restype = ctypes.c_ulonglong
-        self.address = self.pts_library.GetDongleBDAddress()
-        self.address_str = "{0:012X}".format(self.address)
-        self.log.info("PTS BD Address 0x{0:s}".format(self.address_str))
-
-        # Initialize Bluetooth Protocol Viewer communication module
-        self.pts_library.SnifferInitializeEx()
-
-        # If Bluetooth Protocol Viewer is not running, start it
-        if not self.is_sniffer_running():
-            self.log.info("Starting Protocol Viewer")
-            args = [
-                "{}\Executables\Core\FTS.exe".format(
-                    self.pts_sniffer_directory),
-                '/PTS Protocol Viewer=Generic',
-                '/OEMTitle=Bluetooth Protocol Viewer', '/OEMKey=Virtual'
-            ]
-            subprocess.Popen(args)
-            sniffer_timeout = 10
-            while not self.is_sniffer_running():
-                time.sleep(sniffer_timeout)
-
-        # Register to receive Bluetooth Protocol Viewer notifications
-        self.pts_library.SnifferRegisterNotificationEx()
-        self.pts_library.SetParameterEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
-        ]
-
-        for ics_name in self.ics:
-            res = self.pts_library.SetParameterEx(
-                ics_name, b'BOOLEAN', self.ics[ics_name],
-                self.profile_under_test.encode())
-            if res:
-                self.log.info("ICS {0:s} set successfully".format(
-                    str(ics_name)))
-            else:
-                self.log.error("Setting ICS {0:s} value failed".format(
-                    str(ics_name)))
-
-        for ixit_name in self.ixit:
-            res = self.pts_library.SetParameterEx(
-                ixit_name, (self.ixit[ixit_name])[0],
-                (self.ixit[ixit_name])[1], self.profile_under_test.encode())
-            if res:
-                self.log.info("IXIT {0:s} set successfully".format(
-                    str(ixit_name)))
-            else:
-                self.log.error("Setting IXIT {0:s} value failed".format(
-                    str(ixit_name)))
-
-        # Prepare directory to store Bluetooth Protocol Viewer output
-        if not os.path.exists(self.log_directory):
-            os.makedirs(self.log_directory)
-
-        address_b = self.address_str.encode("utf-8")
-        self.pts_library.InitEtsEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
-        ]
-
-        implicit_send_path = "{}\\implicit_send3.dll".format(
-            self.pts_installation_directory).encode()
-        res = self.pts_library.InitEtsEx(self.profile_under_test.encode(),
-                                         self.log_directory.encode(),
-                                         implicit_send_path, address_b)
-        self.log.info("ETS has been initialized with result {0:s}".format(
-            str(res)))
-
-        # Initialize Host Stack DLL
-        self.pts_library.InitStackEx.argtypes = [ctypes.c_char_p]
-        res = self.pts_library.InitStackEx(self.profile_under_test.encode())
-        self.log.info("Stack has been initialized with result {0:s}".format(
-            str(res)))
-
-        # Select to receive Log messages after test is done
-        self.pts_library.SetPostLoggingEx.argtypes = [
-            ctypes.c_bool, ctypes.c_char_p
-        ]
-        self.pts_library.SetPostLoggingEx(True,
-                                          self.profile_under_test.encode())
-
-        # Clear Bluetooth Protocol Viewer. The dongle message callback will
-        # update sniffer_ready automatically. No need to fail setup if the
-        # timeout is exceeded; the logs will still be available, just not
-        # starting from a clean slate, so only post a warning.
-        self.sniffer_ready = False
-        self.pts_library.SnifferClearEx()
-        end_time = time.time() + 10
-        while not self.sniffer_ready and time.time() < end_time:
-            time.sleep(1)
-        if not self.sniffer_ready:
-            self.log.warning("Sniffer not cleared. Continuing.")
-
-    def is_sniffer_running(self):
-        """ Looks for running Bluetooth Protocol Viewer process
-
-        Returns:
-            Returns True if finds one, False otherwise.
-        """
-        prog = [
-            line.split()
-            for line in subprocess.check_output("tasklist").splitlines()
-        ]
-        # Drop the tasklist header rows.
-        del prog[0:3]
-        for task in prog:
-            task_name = task[0].decode("utf-8")
-            if task_name == "Fts.exe":
-                self.log.info("Found FTS process successfully.")
-                # Sleep recommended by PTS.
-                time.sleep(1)
-                return True
-        return False
-
-    def UseAutoImplicitSend(self):
-        """Callback method that defines Which ImplicitSend will be used.
-
-        Returns:
-            True always to inform PTS to use the local implementation.
-        """
-        return True
-
-    def DongleMsg(self, msg_str):
-        """ Receives PTS dongle messages.
-
-        Specifically this receives the Bluetooth Protocol Viewer completed
-        save/clear operations.
-
-        Returns:
-            True if sniffer is ready, False otherwise.
-        """
-        msg = (ctypes.c_char_p(msg_str).value).decode("utf-8")
-        self.log.info(msg)
-        # Sleep recommended by PTS.
-        time.sleep(1)
-        if SNIFFER_READY in msg:
-            self.sniffer_ready = True
-        return True
-
-    def DeviceSearchMsg(self, addr_str, name_str, cod_str):
-        """ Receives device search messages
-
-        Each device may return multiple messages. Each message will contain
-        the device address and may contain the device name and COD.
-
-        Returns:
-            True always and reports to the callback appropriately.
-        """
-        addr = (ctypes.c_char_p(addr_str).value).replace(b'\xed',
-                                                         b' ').decode("utf-8")
-        name = (ctypes.c_char_p(name_str).value).replace(b'\xed',
-                                                         b' ').decode("utf-8")
-        cod = (ctypes.c_char_p(cod_str).value).replace(b'\xed',
-                                                       b' ').decode("utf-8")
-        self.devices.append(
-            "Device address = {0:s} name = {1:s} cod = {2:s}".format(
-                addr, name, cod))
-        return True
-
-    def Log(self, log_time_str, log_descr_str, log_msg_str, log_type, project):
-        """ Receives PTS log messages.
-
-        Returns:
-            True always and reports to the callback appropriately.
-        """
-        log_time = (ctypes.c_char_p(log_time_str).value).decode("utf-8")
-        log_descr = (ctypes.c_char_p(log_descr_str).value).decode("utf-8")
-        log_msg = (ctypes.c_char_p(log_msg_str).value).decode("utf-8")
-        if "Verdict Description" in log_descr:
-            self.xml_pts_running_summary.text += "\t- {}".format(log_msg)
-        if "Final Verdict" in log_descr:
-            self.xml_pts_running_summary.text += "{}{}\n".format(
-                log_descr.strip(), log_msg.strip())
-        full_log_msg = "{}{}{}".format(log_time, log_descr, log_msg)
-        self.xml_pts_running_log.text += "{}\n".format(str(full_log_msg))
-
-        if ctypes.c_int(log_type).value == LOG_TYPE_FINAL_VERDICT:
-            indx = log_msg.find(VERDICT)
-            if indx == 0:
-                if self.pts_test_result == VERDICT_STRINGS['RESULT_INCOMP']:
-                    if VERDICT_STRINGS['RESULT_INCONC'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_INCONC']
-                    elif VERDICT_STRINGS['RESULT_FAIL'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_FAIL']
-                    elif VERDICT_STRINGS['RESULT_PASS'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_PASS']
-                    elif VERDICT_STRINGS['RESULT_NONE'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_NONE']
-        return True
-
-    def ImplicitSend(self, description, style):
-        """ ImplicitSend callback
-
-        Implicit Send Styles:
-            MMI_Style_Ok_Cancel1 =     0x11041, Simple prompt           | OK, Cancel buttons      | Default: OK
-            MMI_Style_Ok_Cancel2 =     0x11141, Simple prompt           | Cancel button           | Default: Cancel
-            MMI_Style_Ok1 =            0x11040, Simple prompt           | OK button               | Default: OK
-            MMI_Style_Yes_No1 =        0x11044, Simple prompt           | Yes, No buttons         | Default: Yes
-            MMI_Style_Yes_No_Cancel1 = 0x11043, Simple prompt           | Yes, No buttons         | Default: Yes
-            MMI_Style_Abort_Retry1 =   0x11042, Simple prompt           | Abort, Retry buttons    | Default: Abort
-            MMI_Style_Edit1 =          0x12040, Request for data input  | OK, Cancel buttons      | Default: OK
-            MMI_Style_Edit2 =          0x12140, Select item from a list | OK, Cancel buttons      | Default: OK
-
-        Handling
-            MMI_Style_Ok_Cancel1
-                OK = return "OK"
-                Cancel = return 0
-
-            MMI_Style_Ok_Cancel2
-                OK = return "OK"
-                Cancel = return 0
-
-            MMI_Style_Ok1
-                OK = return "OK", this version should not return 0
-
-            MMI_Style_Yes_No1
-                Yes = return "OK"
-                No = return 0
-
-            MMI_Style_Yes_No_Cancel1
-                Yes = return "OK"
-                No = return 0
-                Cancel = has been deprecated
-
-            MMI_Style_Abort_Retry1
-                Abort = return 0
-                Retry = return "OK"
-
-            MMI_Style_Edit1
-                OK = return expected string
-                Cancel = return 0
-
-            MMI_Style_Edit2
-                OK = return expected string
-                Cancel = return 0
-
-        Receives ImplicitSend messages.
-        The description format is as follows:
-        {MMI_ID,Test Name,Layer Name}MMI Action\n\nDescription: MMI Description
-        """
-        descr_str = (ctypes.c_char_p(description).value).decode("utf-8")
-        # Sleep recommended by PTS.
-        time.sleep(1)
-        indx = descr_str.find('}')
-        implicit_send_info = descr_str[1:(indx)]
-        self.current_implicit_send_description = descr_str[(indx + 1):]
-        items = implicit_send_info.split(',')
-        implicit_send_info_id = items[0]
-        implicit_send_info_test_case = items[1]
-        self.pts_profile_mmi_request = items[2]
-        self.log.info(
-            "OnImplicitSend() has been called with the following parameters:\n"
-        )
-        self.log.info("\t\tproject_name = {0:s}".format(
-            self.pts_profile_mmi_request))
-        self.log.info("\t\tid = {0:s}".format(implicit_send_info_id))
-        self.log.info(
-            "\t\ttest_case = {0:s}".format(implicit_send_info_test_case))
-        self.log.info("\t\tdescription = {0:s}".format(
-            self.current_implicit_send_description))
-        self.log.info("\t\tstyle = {0:#X}".format(ctypes.c_int(style).value))
-        self.log.info("")
-        try:
-            self.next_action = int(implicit_send_info_id)
-        except Exception as err:
-            self.log.error(
-                "Setting verdict to RESULT_FAIL, exception found: {}".format(
-                    err))
-            self.pts_test_result = VERDICT_STRINGS['RESULT_FAIL']
-        res = b'OK'
-        if len(self.extra_answers) > 0:
-            res = self.extra_answers.pop(0).encode()
-        self.log.info("Sending Response: {}".format(res))
-        return res
-
-    def log_results(self, test_name):
-        """Log results.
-
-        Saves the sniffer results in cfa format and clears the sniffer.
-
-        Args:
-            test_name: string, name of the test run.
-        """
-        self.pts_library.SnifferCanSaveEx.restype = ctypes.c_bool
-        canSave = ctypes.c_bool(self.pts_library.SnifferCanSaveEx()).value
-        self.pts_library.SnifferCanSaveAndClearEx.restype = ctypes.c_bool
-        canSaveClear = ctypes.c_bool(
-            self.pts_library.SnifferCanSaveAndClearEx()).value
-        file_name = "\\{}.cfa".format(self.test_log_prefix).encode()
-        path = self.test_log_directory.encode() + file_name
-
-        if canSave:
-            self.pts_library.SnifferSaveEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.SnifferSaveEx(path)
-        else:
-            self.pts_library.SnifferSaveAndClearEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.SnifferSaveAndClearEx(path)
-        end_time = time.time() + 60
-        while not self.sniffer_ready and time.time() < end_time:
-            self.log.info("Waiting for sniffer to be ready...")
-            time.sleep(1)
-        if not self.sniffer_ready:
-            raise BluetoothPtsSnifferError(
-                "Sniffer not ready after 60 seconds.")
-
-    def execute_test(self, test_name, test_timeout=60):
-        """Execute the input test name.
-
-        Preps PTS to run the test and waits up to test_timeout seconds for
-        all steps in the execution to finish. Cleanup of PTS related objects
-        follows any test verdict.
-
-        Args:
-            test_name: string, name of the test to execute.
-            test_timeout: int, seconds to wait for a verdict. Defaults to 60.
-        """
-        today = datetime.now()
-        self.write_xml_pts_pixit_values_for_current_test()
-        # TODO: Find out how to grab the PTS version. Temporarily
-        # hardcoded to v.7.4.1.2.
-        self.xml_pts_pixit.text = (
-            "Test Case Started: {} v.7.4.1.2, {} started on {}\n\n{}".format(
-                self.profile_under_test, test_name,
-                today.strftime("%A, %B %d, %Y, %H:%M:%S"),
-                self.xml_pts_pixit.text))
-
-        self.xml_pts_running_summary.text += "Test case : {} started\n".format(
-            test_name)
-        log_time_formatted = "{:%Y_%m_%d_%H_%M_%S}".format(datetime.now())
-        formatted_test_name = test_name.replace('/', '_')
-        formatted_test_name = formatted_test_name.replace('-', '_')
-        self.test_log_prefix = "{}_{}".format(formatted_test_name,
-                                              log_time_formatted)
-        self.test_log_directory = "{}\\{}\\{}".format(self.log_directory,
-                                                      self.profile_under_test,
-                                                      self.test_log_prefix)
-        os.makedirs(self.test_log_directory)
-        curr_test = test_name.encode()
-
-        self.pts_library.StartTestCaseEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_bool
-        ]
-        res = self.pts_library.StartTestCaseEx(
-            curr_test, self.profile_under_test.encode(), True)
-        self.log.info("Test has been started with result {0:s}".format(
-            str(res)))
-
-        # Wait till verdict is received
-        self.log.info("Begin Test Execution... waiting for verdict.")
-        end_time = time.time() + test_timeout
-        while self.pts_test_result == VERDICT_STRINGS[
-                'RESULT_INCOMP'] and time.time() < end_time:
-            time.sleep(1)
-        self.log.info("End Test Execution... Verdict {}".format(
-            self.pts_test_result))
-
-        # Clean up after test is done
-        self.pts_library.TestCaseFinishedEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p
-        ]
-        res = self.pts_library.TestCaseFinishedEx(
-            curr_test, self.profile_under_test.encode())
-
-        self.log_results(test_name)
-        self.xml_pts_running_summary.text += "{} finished\n".format(test_name)
-        # Add the log results to the XML output
-        self.xml_root.append(self.xml_pts_pixit)
-        self.xml_root.append(self.xml_pts_running_log)
-        self.xml_root.append(self.xml_pts_running_summary)
-        rough_string = ET.tostring(self.xml_root,
-                                   encoding='utf-8',
-                                   method='xml')
-        reparsed = minidom.parseString(rough_string)
-        with open(
-                "{}\\{}.xml".format(self.test_log_directory,
-                                    self.test_log_prefix), "w") as writer:
-            writer.write(
-                reparsed.toprettyxml(indent="  ", encoding="utf-8").decode())
-
-        if self.pts_test_result is VERDICT_STRINGS['RESULT_PASS']:
-            return True
-        return False
-
-    """Observer functions"""
-
-    def bind_to(self, callback):
-        """ Callbacks to add to the observer.
-        This is used for DUTS automatic responses (ImplicitSends local
-        implementation).
-        """
-        self._observers.append(callback)
-
-    @property
-    def next_action(self):
-        return self._next_action
-
-    @next_action.setter
-    def next_action(self, action):
-        self._next_action = action
-        for callback in self._observers:
-            callback(self._next_action)
-
-    """End Observer functions"""
diff --git a/src/antlion/controllers/buds_controller.py b/src/antlion/controllers/buds_controller.py
deleted file mode 100644
index 0040d85..0000000
--- a/src/antlion/controllers/buds_controller.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This is the controller module for Pixel Buds devices.
-
-For the device definition, see buds_lib.apollo_lib.
-"""
-
-from antlion.controllers.buds_lib.apollo_lib import ParentDevice
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'BudsDevice'
-ACTS_CONTROLLER_REFERENCE_NAME = 'buds_devices'
-
-
-class ConfigError(Exception):
-    """Raised when the configuration is malformatted."""
-
-
-def create(configs):
-    """Creates a Pixel Buds device for each config found within the configs.
-
-    Args:
-        configs: The configs can be structured in the following ways:
-
-                    ['serial1', 'serial2', ... ]
-
-                    [
-                        {
-                            'serial': 'serial1',
-                            'label': 'some_info',
-                            ...
-                        },
-                        {
-                            'serial': 'serial2',
-                            'label': 'other_info',
-                            ...
-                        }
-                    ]
-    """
-    created_controllers = []
-
-    if not isinstance(configs, list):
-        raise ConfigError('Malformatted config %s. Must be a list.' % configs)
-
-    for config in configs:
-        if isinstance(config, str):
-            created_controllers.append(ParentDevice(config))
-        elif isinstance(config, dict):
-            serial = config.get('serial', None)
-            if not serial:
-                raise ConfigError('Buds Device %s is missing entry "serial".' %
-                                  config)
-            created_controllers.append(ParentDevice(serial))
-        else:
-            raise ConfigError('Malformatted config: "%s". Must be a string or '
-                              'dict' % config)
-    return created_controllers
-
-
-def destroy(buds_device_list):
-    pass
-
-
-def get_info(buds_device_list):
-    device_infos = []
-    for buds_device in buds_device_list:
-        device_infos.append({'serial': buds_device.serial_number,
-                             'name': buds_device.device_name})
-    return device_infos
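
For context, the removed create() accepted either bare serial strings or
per-device dicts, as described in its docstring. A minimal sketch of the two
equivalent config shapes and the serial extraction they imply (extract_serials
is an illustrative stand-in and does not construct ParentDevice):

configs_as_strings = ['serial1', 'serial2']

configs_as_dicts = [
    {'serial': 'serial1', 'label': 'some_info'},
    {'serial': 'serial2', 'label': 'other_info'},
]


def extract_serials(configs):
    """Mirror the validation performed by the deleted create()."""
    if not isinstance(configs, list):
        raise ValueError('Malformed config %s. Must be a list.' % configs)
    serials = []
    for config in configs:
        if isinstance(config, str):
            serials.append(config)
        elif isinstance(config, dict):
            serial = config.get('serial')
            if not serial:
                raise ValueError('Device %s is missing entry "serial".' % config)
            serials.append(serial)
        else:
            raise ValueError('Malformed config: "%s". Must be a string or dict.'
                             % config)
    return serials


assert extract_serials(configs_as_strings) == extract_serials(configs_as_dicts)
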
diff --git a/src/antlion/controllers/buds_lib/__init__.py b/src/antlion/controllers/buds_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/apollo_lib.py b/src/antlion/controllers/buds_lib/apollo_lib.py
deleted file mode 100644
index 1a63c62..0000000
--- a/src/antlion/controllers/buds_lib/apollo_lib.py
+++ /dev/null
@@ -1,1514 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo Commander through USB/UART interface.
-
-It uses the Python serial library to communicate with an Apollo device.
-Some of the commander commands may not work yet, pending the final version of
-the commander implementation.
-
-Typical usage examples:
-
-    To get a list of all apollo devices:
-    >>> devices = apollo_lib.get_devices()
-
-    To work with a specific apollo device:
-    >>> apollo = apollo_lib.BudsDevice(serial_number='ABCDEF0123456789',
-    >>> commander_port='/dev/ttyACM0')
-
-    To send a single command:
-    >>> apollo.cmd('PowOff')
-
-    To send a list of commands:
-    >>> apollo.cmd(['PowOff', 'PowOn', 'VolUp', 'VolDown'])
-"""
-import atexit
-import os
-import re
-import subprocess
-import time
-from logging import Logger
-
-import serial
-from tenacity import retry, stop_after_attempt, wait_exponential
-
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.controllers.buds_lib import logserial
-from antlion.controllers.buds_lib.b29_lib import B29Device
-from antlion.controllers.buds_lib.dev_utils import apollo_log_decoder
-from antlion.controllers.buds_lib.dev_utils import apollo_log_regex
-from antlion.controllers.buds_lib.dev_utils import apollo_sink_events
-
-logging = tako_trace_logger.TakoTraceLogger(Logger('apollo'))
-
-BAUD_RATE = 115200
-BYTE_SIZE = 8
-PARITY = 'N'
-STOP_BITS = 1
-DEFAULT_TIMEOUT = 3
-WRITE_TO_FLASH_WAIT = 30  # wait 30 sec when writing to external flash.
-LOG_REGEX = re.compile(r'(?P<time_stamp>\d+)\s(?P<msg>.*)')
-STATUS_REGEX = r'(?P<time_stamp>\d+)\s(?P<key>.+?): (?P<value>.+)'
-APOLLO_CHIP = '_Apollo_'
-DEVICE_REGEX = (
-    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)'
-    r'\s->\s(\.\./){2}(?P<port>\w+)'
-)
-OTA_VERIFICATION_FAILED = 'OTA verification failed. corrupt image?'
-OTA_ERASING_PARTITION = 'INFO OTA eras ptns'
-OTA_RECEIVE_CSR_REGEX = r'INFO OTA CSR rcv begin'
-CODEC_REGEX = r'(?P<time_stamp>\d+)\s(?P<codec>\w+) codec is used.'
-BUILD_REGEX = r'\d+\.\d+\.(?P<build>\d+)-?(?P<psoc_build>\d*)-?(?P<debug>\w*)'
-
-
-class Error(Exception):
-    """Module Level Error."""
-
-
-class ResponseError(Error):
-    """cmd Response Error."""
-
-
-class DeviceError(Error):
-    """Device Error."""
-
-
-class ConnectError(Error):
-    """Connection Error."""
-
-
-def get_devices():
-    """Get all available Apollo devices.
-
-    Returns:
-        (list) A list of available devices or empty list if none found
-
-    Raises:
-        Error: raises Error if no Apollo devices or wrong interfaces were found.
-    """
-    devices = []
-    result = os.popen('ls -l /dev/serial/by-id/*%s*' % APOLLO_CHIP).read()
-    if not result:
-        raise Error('No Apollo Devices found.')
-    for line in result.splitlines():
-        match = re.search(DEVICE_REGEX, line)
-        interface = match.group('interface')
-        # TODO: The commander port will always be None.
-        commander_port = None
-        if interface == 'if00':
-            commander_port = '/dev/' + match.group('port')
-            continue
-        elif interface == 'if02':
-            log_port = '/dev/' + match.group('port')
-        else:
-            raise Error('Wrong interface found.')
-        device_serial = match.group('device_serial')
-
-        device = {
-            'commander_port': commander_port,
-            'log_port': log_port,
-            'serial_number': device_serial
-        }
-        devices.append(device)
-    return devices
-
-
-class BudsDevice(object):
-    """Provides a simple class to interact with Apollo."""
-
-    def __init__(self, serial_number, commander_port=None, log_port=None,
-                 serial_logger=None):
-        """Establish a connection to a Apollo.
-
-        Open a connection to a device with a specific serial number.
-
-        Raises:
-            ConnectError: raises ConnectError if cannot open the device.
-        """
-        self.set_log = False
-        self.connection_handle = None
-        self.device_closed = False
-        if serial_logger:
-            self.set_logger(serial_logger)
-        self.pc = logserial.PortCheck()
-        self.serial_number = serial_number
-        # TODO (kselvakumaran): move this to an interface device class that
-        # apollo_lib.BudsDevice should derive from
-        if not commander_port and not log_port:
-            self.get_device_ports(self.serial_number)
-        if commander_port:
-            self.commander_port = commander_port
-        if log_port:
-            self.log_port = log_port
-        self.apollo_log = None
-        self.cmd_log = None
-        self.apollo_log_regex = apollo_log_regex
-        self.dut_type = 'apollo'
-
-        # TODO (kselvakumaran): move this to an interface device class that
-        # apollo_lib.BudsDevice should derive from
-
-        try:  # Try to open the device
-            self.connection_handle = logserial.LogSerial(
-                self.commander_port, BAUD_RATE, flush_output=False,
-                serial_logger=logging)
-            self.wait_for_commander()
-        except (serial.SerialException, AssertionError, ConnectError) as e:
-            logging.error(
-                'error opening device {}: {}'.format(serial_number, e))
-            raise ConnectError('Error opening the device.')
-        # disable sleep on idle
-        self.stay_connected_state = 1
-        atexit.register(self.close)
-
-    def set_logger(self, serial_logger):
-        global logging
-        logging = serial_logger
-        self.set_log = True
-        if self.connection_handle:
-            self.connection_handle.set_logger(serial_logger)
-
-    def get_device_ports(self, serial_number):
-        commander_query = {'ID_SERIAL_SHORT': serial_number,
-                           'ID_USB_INTERFACE_NUM': '00'}
-        log_query = {'ID_SERIAL_SHORT': serial_number,
-                     'ID_USB_INTERFACE_NUM': '02'}
-        self.commander_port = self.pc.search_port_by_property(commander_query)
-        self.log_port = self.pc.search_port_by_property(log_query)
-        if not self.commander_port and not self.log_port:
-            raise ConnectError(
-                'BudsDevice serial number %s not found' % serial_number)
-        else:
-            if not self.commander_port:
-                raise ConnectError('No devices found')
-            self.commander_port = self.commander_port[0]
-            self.log_port = self.log_port[0]
-
-    def get_all_log(self):
-        return self.connection_handle.get_all_log()
-
-    def query_log(self, from_timestamp, to_timestamp):
-        return self.connection_handle.query_serial_log(
-            from_timestamp=from_timestamp, to_timestamp=to_timestamp)
-
-    def send(self, cmd):
-        """Sends the command to serial port.
-
-        It does not care about whether the cmd is successful or not.
-
-        Args:
-            cmd: The passed command
-
-        Returns:
-            The output read back from the serial port.
-        """
-        logging.debug(cmd)
-        # with self._lock:
-        self.connection_handle.write(cmd)
-        result = self.connection_handle.read()
-        return result
-
-    def cmd(self, cmds, wait=None):
-        """Sends the commands and check responses.
-
-        Valid cmd will return something like '585857269 running cmd VolUp'.
-        Invalid cmd will log an error and return something like '585826369 No
-        command vol exists'.
-
-        Args:
-            cmds: The commands to the commander.
-            wait: wait in seconds for the cmd response.
-
-        Returns:
-            (list) One entry per command: the second element of the tuple
-            returned by _cmd.
-        """
-        if isinstance(cmds, str):
-            cmds = [cmds]
-        results = []
-        for cmd in cmds:
-            _, result = self._cmd(cmd, wait=wait)
-            results.append(result)
-        return results
-
-    def _cmd(self, cmd, wait=None, throw_error=True):
-        """Sends a single command and check responses.
-
-        Valid cmd will return something like '585857269 running cmd VolUp'.
-        Invalid cmd will log an error and return something like '585826369 No
-        command vol exists'. Some cmd will return multiple lines of output.
-        eg. 'menu'.
-
-        Args:
-            cmd: The command to the commander.
-            wait: wait in seconds for the cmd response.
-            throw_error: If True, raise an exception when the command fails.
-
-        Returns:
-            (tuple) of (success, result), where result is a list such as
-            [<protobuf dictionary>, <str>, ...]. Hex strings (protobuf) are
-            replaced by their decoded dictionaries and stored in an array
-            along with the other strings returned from the device.
-
-        Raises:
-            DeviceError: On Error.(Optional)
-        """
-        self.connection_handle.write(cmd)
-
-        while self.connection_handle.is_logging:
-            time.sleep(.01)
-        if wait:
-            self.wait(wait)
-        # Using read_serial_port as readlines is a blocking call until idle.
-        res = self.read_serial_port()
-        result = []
-        self.cmd_log = res
-        command_resv = False
-        # TODO: Cleanup the usage of the two booleans below.
-        command_finish = False
-        command_rejected = False
-        # for line in iter_res:
-        for line in res:
-            if isinstance(line, dict):
-                if 'COMMANDER_RECV_COMMAND' in line.values():
-                    command_resv = True
-                elif 'COMMANDER_REJECT_COMMAND' in line.values():
-                    logging.info('Command rejected')
-                    command_rejected = True
-                    break
-                elif 'COMMANDER_FINISH_COMMAND' in line.values():
-                    command_finish = True
-                    break
-                elif (command_resv and not command_finish and
-                      not command_rejected):
-                    result.append(line)
-            # TODO(jesussalinas): Remove when only encoded lines are required
-            elif command_resv and not command_finish and not command_rejected:
-                if 'running cmd' not in line:
-                    result.append(line)
-        success = True
-        if command_rejected or not command_resv:
-            success = False
-            if throw_error:
-                logging.info(res)
-                raise DeviceError('Unknown command %s' % cmd)
-        return success, result
-
-    def get_pdl(self):
-        """Returns the PDL stack dictionary.
-
-        The PDL stack stores the paired devices of Apollo. Each PDL entry
-        includes mac_address, flags, link_key, and priority fields.
-
-        Returns:
-            list of pdl dicts.
-        """
-        # Get the mask from CONNLIB41:
-        # CONNLIB41 typically looks something like this: 2403 fff1
-        # 2403 fff1 is actually two 16-bit words of a 32-bit integer
-        # like 0xfff12403. This encodes the chronological order of the entries
-        # in the paired device list, one nibble each; LSB to MSB corresponds to
-        # CONNLIB42 through CONNLIB49. So the above tells us that the device at
-        # 0x2638 is the 3rd most recent entry, 0x2639 the latest entry, etc. As
-        # a device re-pairs, the masks are updated.
-        response = []
-        mask = 'ffffffff'
-        res = self.cmd('GetPSHex 0x2637')
-        if len(res[0]) == 0:
-            logging.warning('Error reading PDL mask @ 0x2637')
-            return response
-        else:
-            regexp = r'\d+\s+(?P<m1>....)\s(?P<m2>....)'
-            match = re.match(regexp, res[0][0])
-            if match:
-                connlib41 = match.group('m2') + match.group('m1')
-                mask = connlib41[::-1]
-                logging.debug('PDL mask: %s' % mask)
-
-        # Now get the MAC/link key
-        mask_idx = 0
-        for i in range(9784, 9883):
-            types = {}
-            res = self.cmd('GetPSHex ' + '%0.2x' % i)
-            if len(res[0]) == 0:
-                break
-            else:
-                regexp = (r'\d+\s+(?P<Mac>....\s....\s....)\s'
-                          r'(?P<Flags>....\s....)\s(?P<Linkkey>.*)')
-                match = re.search(regexp, res[0][0])
-                if match:
-                    mac_address = match.group('Mac').replace(' ', '').upper()
-                    formatted_mac = ''
-                    for i in range(len(mac_address)):
-                        formatted_mac += mac_address[i]
-                        if i % 2 != 0 and i < (len(mac_address) - 1):
-                            formatted_mac += ':'
-                    types['mac_address'] = formatted_mac
-                    types['flags'] = match.group('Flags').replace(' ', '')
-                    types['link_key'] = match.group('Linkkey').replace(' ', '')
-                    types['priority'] = int(mask[mask_idx], 16)
-                    mask_idx += 1
-                    response.append(types)
-
-        return response
-
-    def set_pairing_mode(self):
-        """Enter Bluetooth Pairing mode."""
-        logging.debug('Inside set_pairing_mode()...')
-        try:
-            return self.cmd('Pair')
-        except DeviceError:
-            logging.exception('Pair cmd failed')
-
-    # TODO (kselvakumaran): move this to an interface BT class that
-    # apollo_lib.BudsDevice should derive from
-    def turn_on_bluetooth(self):
-        return True
-
-    # TODO (kselvakumaran): move this to an interface BT class that
-    # apollo_lib.BudsDevice should derive from
-    def is_bt_enabled(self):
-        """Check if BT is enabled.
-
-        (TODO:weisu)Currently it is always true since there is no way to disable
-        BT in apollo
-
-        Returns:
-            True if BT is enabled.
-        """
-        logging.debug('Inside is_bt_enabled()...')
-        return True
-
-    def panic(self):
-        """Hitting a panic, device will be automatically reset after 5s."""
-        logging.debug('Inside panic()...')
-        try:
-            self.send('panic')
-        except serial.SerialException:
-            logging.exception('panic cmd failed')
-
-    def power(self, cmd):
-        """Controls the power state of the device.
-
-        Args:
-            cmd: If 'Off', powers the device off. Otherwise, powers the device
-                 on.
-        """
-        logging.debug('Inside power({})...'.format(cmd))
-        mode = '0' if cmd == 'Off' else '1'
-        cmd = 'Pow ' + mode
-        try:
-            return self.cmd(cmd)
-        except DeviceError:
-            logging.exception('{} cmd failed'.format(cmd))
-
-    def charge(self, state):
-        """Charging Control of the device.
-
-        Args:
-          state: '1/0' to enable/disable charging.
-        """
-        logging.debug('Inside charge({})...'.format(state))
-        cmd = 'chg ' + state
-        try:
-            self.cmd(cmd)
-        except DeviceError:
-            logging.exception('{} cmd failed'.format(cmd))
-
-    def get_battery_level(self):
-        """Get the battery charge level.
-
-        Returns:
-            charge percentage string.
-
-        Raises:
-            DeviceError: GetBatt response error.
-        """
-        response = self.cmd('GetBatt')
-        for line in response[0]:
-            if line.find('Batt:') > -1:
-                # Response is in this format: '<messageID> Batt: <percentage>'
-                return line.split()[2]
-        raise DeviceError('Battery Level not found in GetBatt response')
-
-    def get_gas_gauge_current(self):
-        """Get the Gauge current value.
-
-        Returns:
-            Float value with the info
-
-        Raises:
-            DeviceError: I2CRead response error.
-        """
-        response = self.cmd('I2CRead 2 0x29')
-        for line in response[0]:
-            if line.find('value') > -1:
-                return float.fromhex(line.split()[6].replace(',', ''))
-        raise DeviceError('Current Level not found in I2CRead response')
-
-    def get_gas_gauge_voltage(self):
-        """Get the Gauge voltage value.
-
-        Returns:
-            Float value with the info
-
-        Raises:
-            DeviceError: I2CRead response error.
-        """
-        response = self.cmd('I2CRead 2 0x2A')
-        for line in response[0]:
-            if line.find('value') > -1:
-                return float.fromhex(line.split()[6].replace(',', ''))
-        raise DeviceError('Voltage Level not found in I2CRead response')
-
-    def reset(self, wait=5):
-        """Resetting the device."""
-        logging.debug('Inside reset()...')
-        self.power('Off')
-        self.wait(wait)
-        self.power('On')
-
-    def close(self):
-        if not self.device_closed:
-            self.connection_handle.close()
-            self.device_closed = True
-            if not self.set_log:
-                logging.flush_log()
-
-    def get_serial_log(self):
-        """Retrieve the logs from connection handle."""
-        return self.connection_handle.get_all_log()
-
-    def factory_reset(self):
-        """Erase paired device(s) (bond) data and reboot device."""
-        cmd = 'FactoryReset 1'
-        self.send(cmd)
-        self.wait(5)
-        self.reconnect()
-
-    def reboot(self, reconnect=10, retry_timer=30):
-        """Rebooting the device.
-
-        Args:
-            reconnect: reconnect attempts after reboot, None for no reconnect.
-            retry_timer: wait time in seconds before next connect retry.
-
-        Returns:
-            True if successfully reboot or reconnect.
-        """
-        logging.debug('Inside reboot()...')
-        self.panic()
-        if not reconnect:
-            return True
-        ini_time = time.time()
-        message = 'waiting for {} to shutdown'.format(self.serial_number)
-        logging.info(message)
-        while True:
-            alive = self.connection_handle.is_port_alive()
-            if not alive:
-                logging.info('rebooted')
-                break
-            if time.time() - ini_time > 60:
-                logging.info('Shutdown timed out')
-                break
-            time.sleep(0.5)
-        return self.reconnect(reconnect, retry_timer)
-
-    def reconnect(self, iterations=30, retry_timer=20):
-        """Reconnect to the device.
-
-        Args:
-            iterations: Number of retry iterations.
-            retry_timer: wait time in seconds before next connect retry.
-
-        Returns:
-            True if reconnect to the device successfully.
-
-        Raises:
-            DeviceError: Failed to reconnect.
-        """
-        logging.debug('Inside reconnect()...')
-        for i in range(iterations):
-            try:
-                # port might be changed, refresh the port list.
-                self.get_device_ports(self.serial_number)
-                message = 'commander_port: {}, log_port: {}'.format(
-                    self.commander_port, self.log_port)
-                logging.info(message)
-                self.connection_handle.refresh_port_connection(
-                    self.commander_port)
-                # Sometimes there might be some delay before the commander is
-                # functioning.
-                self.wait_for_commander()
-                return True
-            except Exception as e:  # pylint: disable=broad-except
-                message = 'Fail to connect {} times due to {}'.format(
-                    i + 1, e)
-                logging.warning(message)
-                # self.close()
-                time.sleep(retry_timer)
-        raise DeviceError('Cannot reconnect to %s with %d attempts.' %
-                          (self.commander_port, iterations))
-
-    @retry(stop=stop_after_attempt(4),
-           wait=wait_exponential())
-    def wait_for_commander(self):
-        """Wait for commander to function.
-
-        Returns:
-            True if commander worked.
-
-        Raises:
-            DeviceError: Failed to bring up commander.
-        """
-        # self.Flush()
-        result = self.cmd('menu')
-        if result:
-            return True
-        else:
-            raise DeviceError('Cannot start commander.')
-
-    def wait(self, timeout=1):
-        """Wait for the device."""
-        logging.debug('Inside wait()...')
-        time.sleep(timeout)
-
-    def led(self, cmd):
-        """LED control of the device."""
-        message = 'Inside led({})...'.format(cmd)
-        logging.debug(message)
-        cmd = 'EventUsrLeds' + cmd
-        try:
-            return self.cmd(_evt_hex(cmd))
-        except DeviceError:
-            logging.exception('LED cmd failed')
-
-    def volume(self, key, times=1):
-        """Volume Control. (Down/Up).
-
-        Args:
-            key: Down --Decrease a volume.
-                 Up --Increase a volume.
-            times: Simulate number of swipes.
-
-        Returns:
-            (int) Volume level.
-
-        Raises:
-            DeviceError
-        """
-        message = 'Inside volume({}, {})...'.format(key, times)
-        logging.debug(message)
-        updown = {
-            'Up': '1',
-            'Down': '0',
-        }
-        cmds = ['ButtonSwipe ' + updown[key]] * times
-        logging.info(cmds)
-        try:
-            self.cmd(cmds)
-            for line in self.cmd_log:
-                if isinstance(line, dict):
-                    if 'id' in line and line['id'] == 'VOLUME_CHANGE':
-                        if 'data' in line and line['data']:
-                            return int(line['data'])
-        except DeviceError:
-            logging.exception('ButtonSwipe cmd failed')
-
-    def menu(self):
-        """Return a list of supported commands."""
-        logging.debug('Inside menu()...')
-        try:
-            return self.cmd('menu')
-        except DeviceError:
-            logging.exception('menu cmd failed')
-
-    def set_ohd(self, mode='AUTO'):
-        """Manually set the OHD status and override auto-detection.
-
-        Args:
-            mode: ON --OHD manual mode with on-ear state.
-                  OFF --OHD manual mode with off-ear state.
-                  AUTO --OHD auto-detection mode.
-        Raises:
-            DeviceError: OHD Command failure.
-        """
-        logging.debug('Inside set_ohd()...')
-        try:
-            if mode != 'AUTO':
-                # Set up OHD manual mode
-                self.cmd('Test 14 0 2 1')
-                if mode == 'ON':
-                    # Detects on-ear
-                    self.cmd('Test 14 0 2 1 0x3')
-                else:
-                    # Detects off-ear
-                    self.cmd('Test 14 0 2 1 0x0')
-            else:
-                # Default mode (auto detect.)
-                self.cmd('Test 14 0 2 0')
-        except DeviceError:
-            logging.exception('OHD cmd failed')
-
-    def music_control_events(self, cmd, regexp=None, wait=.5):
-        """Sends the EvtHex to control media player.
-
-        Args:
-            cmd: the command to perform.
-            regexp: Optional pattern to validate the event logs.
-            wait: wait in seconds for the cmd response.
-
-        Returns:
-            Boolean: True if the command triggers the correct events on the
-                     device, False otherwise.
-
-        # TODO(nviboonchan:) Add more supported commands.
-        Supported commands:
-            'PlayPause'
-            'VolumeUp'
-            'VolumeDown',
-        """
-        cmd_regexp = {
-            # Play/ Pause would need to pass the regexp argument since it's
-            # sending the same event but returns different responses depending
-            # on the device state.
-            'VolumeUp': apollo_log_regex.VOLUP_REGEX,
-            'VolumeDown': apollo_log_regex.VOLDOWN_REGEX,
-        }
-        if not regexp:
-            if cmd not in cmd_regexp:
-                logmsg = 'Expected pattern is not defined for event %s' % cmd
-                logging.exception(logmsg)
-                return False
-            regexp = cmd_regexp[cmd]
-        self.cmd('EvtHex %s' % apollo_sink_events.SINK_EVENTS['EventUsr' + cmd],
-                 wait=wait)
-        for line in self.cmd_log:
-            if isinstance(line, str):
-                if re.search(regexp, line):
-                    return True
-            elif isinstance(line, dict):
-                if line.get('id', None) == 'AVRCP_PLAY_STATUS_CHANGE':
-                    return True
-        return False
-
-    def avrcp(self, cmd):
-        """sends the Audio/Video Remote Control Profile (avrcp) control command.
-
-        Supported commands:
-            'PlayPause'
-            'Stop'
-            'SkipForward',
-            'SkipBackward',
-            'FastForwardPress',
-            'FastForwardRelease',
-            'RewindPress',
-            'RewindRelease',
-            'ShuffleOff',
-            'ShuffleAllTrack',
-            'ShuffleGroup',
-            'RepeatOff',
-            'RepeatSingleTrack',
-            'RepeatAllTrack',
-            'RepeatGroup',
-            'Play',
-            'Pause',
-            'ToggleActive',
-            'NextGroupPress',
-            'PreviousGroupPress',
-            'NextGroupRelease',
-            'PreviousGroupRelease',
-
-        Args:
-            cmd: The avrcp command.
-
-        """
-        cmd = 'EventUsrAvrcp' + cmd
-        logging.debug(cmd)
-        try:
-            self.cmd(_evt_hex(cmd))
-        except DeviceError:
-            logging.exception('avrcp cmd failed')
-
-    def enable_log(self, levels=None):
-        """Enable specified logs."""
-        logging.debug('Inside enable_log()...')
-        if levels is None:
-            levels = ['ALL']
-        masks = hex(
-            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
-        try:
-            self.cmd('LogOff %s' % apollo_sink_events.LOG_FEATURES['ALL'])
-            return self.cmd('LogOn %s' % masks)
-        except DeviceError:
-            logging.exception('Enable log failed')
-
-    def disable_log(self, levels=None):
-        """Disable specified logs."""
-        logging.debug('Inside disable_log()...')
-        if levels is None:
-            levels = ['ALL']
-        masks = hex(
-            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
-        try:
-            self.cmd('LogOn %s' % apollo_sink_events.LOG_FEATURES['ALL'])
-            return self.cmd('LogOff %s' % masks)
-        except DeviceError:
-            logging.exception('Disable log failed')
-
-    def write_to_flash(self, file_name=None):
-        """Write file to external flash.
-
-        Note: Assume pv is installed. If not, install it by
-              'apt-get install pv'.
-
-        Args:
-            file_name: Full path file name.
-
-        Returns:
-            Boolean: True if write to partition is successful. False otherwise.
-        """
-        logging.debug('Inside write_to_flash()...')
-        if not os.path.isfile(file_name):
-            message = 'DFU file {} not found.'.format(file_name)
-            logging.exception(message)
-            return False
-        logging.info(
-            'Write file {} to external flash partition ...'.format(file_name))
-        image_size = os.path.getsize(file_name)
-        logging.info('image size is {}'.format(image_size))
-        results = self.cmd('Ota {}'.format(image_size), wait=3)
-        logging.debug('Result of Ota command' + str(results))
-        if any(OTA_VERIFICATION_FAILED in result for result in results[0]):
-            return False
-        # finished cmd Ota
-        if (any('OTA_ERASE_PARTITION' in result.values() for result in
-                results[0] if
-                isinstance(result, dict)) or
-                any('OTA erasd ptns' in result for result in results[0])):
-            try:
-                # -B: buffer size in bytes, -L rate-limit in B/s.
-                subcmd = ('pv --force -B 160 -L 10000 %s > %s' %
-                          (file_name, self.commander_port))
-                logging.info(subcmd)
-                p = subprocess.Popen(subcmd, stdout=subprocess.PIPE, shell=True)
-            except OSError:
-                logging.exception(
-                    'pv not installed, please install by: apt-get install pv')
-                return False
-            try:
-                res = self.read_serial_port(read_until=6)
-            except DeviceError:
-                logging.exception('Unable to read the device port')
-                return False
-            for line in res:
-                if isinstance(line, dict):
-                    logging.info(line)
-                else:
-                    match = re.search(OTA_RECEIVE_CSR_REGEX, line)
-                    if match:
-                        logging.info(
-                            'OTA Image received. Transfer is in progress...')
-                        # Polling during a transfer could miss the final message
-                        # when the device reboots, so we wait until the transfer
-                        # completes.
-                        p.wait()
-                        return True
-            # No image transfer in progress.
-            return False
-        else:
-            return False
-
-    def flash_from_file(self, file_name, reconnect=True):
-        """Upgrade Apollo from an image file.
-
-        Args:
-            file_name: DFU file name. eg. /google/data/ro/teams/wearables/
-                       apollo/ota/master/v76/apollo.dfu
-            reconnect: True to reconnect the device after flashing
-        Returns:
-            Bool: True if the upgrade is successful. False otherwise.
-        """
-        logging.debug('Inside flash_from_file()...')
-        if self.write_to_flash(file_name):
-            logging.info('OTA image transfer is completed')
-            if reconnect:
-                # Transfer is completed; waiting for the device to reboot.
-                logging.info('wait to make sure old connection disappears.')
-                self.wait_for_reset(timeout=150)
-                self.reconnect()
-                logging.info('BudsDevice reboots successfully after OTA.')
-            return True
-        return False
-
-    def open_mic(self, post_delay=5):
-        """Open Microphone on the device using EvtHex command.
-
-        Args:
-            post_delay: time delay in seconds after the microphone is opened.
-
-        Returns:
-            Returns True or False based on whether the command was executed.
-        """
-        logging.debug('Inside open_mic()...')
-        success, _ = self._cmd('Voicecmd 1', post_delay)
-        return success
-
-    def close_mic(self, post_delay=5):
-        """Close Microphone on the device using EvtHex command.
-
-        Args:
-            post_delay: time delay in seconds after the microphone is closed.
-
-        Returns:
-            Returns true or false based on whether the command was executed.
-        """
-        logging.debug('Inside close_mic()...')
-        success, _ = self._cmd('Voicecmd 0', post_delay)
-        return success
-
-    def touch_key_press_event(self, wait=1):
-        """send key press event command.
-
-        Args:
-            wait: Inject delay after key press to simulate real touch event .
-        """
-        logging.debug('Inside KeyPress()...')
-        self._cmd('Touch 6')
-        self.wait(wait)
-
-    def touch_tap_event(self, wait_if_pause=10):
-        """send key release event after key press to simulate single tap.
-
-        Args:
-            wait_if_pause: Inject delay after avrcp pause was detected.
-
-        Returns:
-            Returns False if avrcp play or pause is not detected, else True.
-        """
-        logging.debug('Inside Touch Tap event()...')
-        self._cmd('Touch 4')
-        for line in self.cmd_log:
-            if 'avrcp play' in line:
-                logging.info('avrcp play detected')
-                return True
-            if 'avrcp pause' in line:
-                logging.info('avrcp pause detected')
-                self.wait(wait_if_pause)
-                return True
-        return False
-
-    def touch_hold_up_event(self):
-        """Open Microphone on the device using touch hold up command.
-
-        Returns:
-            Returns True or False based on whether the command was executed.
-        """
-        logging.debug('Inside touch_hold_up_event()...')
-        self._cmd('Touch 3')
-        for line in self.cmd_log:
-            if 'Button 1 LONG_BEGIN' in line:
-                logging.info('mic open success')
-                return True
-        return False
-
-    def touch_hold_down_event(self):
-        """Close Microphone on the device using touch hold down command.
-
-        Returns:
-            Returns true or false based on whether the command was executed.
-        """
-        logging.debug('Inside touch_hold_down_event()...')
-        self._cmd('Touch 8')
-        for line in self.cmd_log:
-            if 'Button 1 LONG_END' in line:
-                logging.info('mic close success')
-                return True
-        return False
-
-    def tap(self):
-        """Performs a Tap gesture."""
-        logging.debug('Inside tap()')
-        self.cmd('ButtonTap 0')
-
-    def hold(self, duration):
-        """Tap and hold a button.
-
-        Args:
-            duration: (int) duration in milliseconds.
-        """
-        logging.debug('Inside hold()')
-        self.cmd('ButtonHold ' + str(duration))
-
-    def swipe(self, direction):
-        """Perform a swipe gesture.
-
-        Args:
-            direction: (int) swipe direction 1 forward, 0 backward.
-        """
-        logging.debug('Inside swipe()')
-        self.cmd('ButtonSwipe ' + direction)
-
-    def get_pskey(self, key):
-        """Fetch value from persistent store."""
-        try:
-            cmd = 'GetPSHex ' + apollo_sink_events.PSKEY[key]
-        except KeyError:
-            raise DeviceError('PS Key: %s not found' % key)
-        pskey = ''
-        try:
-            ret = self.cmd(cmd)
-            for result in ret[0]:
-                if not re.search(r'pskey', result.lower()) and LOG_REGEX.match(
-                        result):
-                    # values are broken into words separated by spaces.
-                    pskey += LOG_REGEX.match(result).group('msg').replace(' ',
-                                                                          '')
-                else:
-                    continue
-        except DeviceError:
-            logging.exception('GetPSHex cmd failed')
-        return pskey
-
-    def get_version(self):
-        """Return a device version information.
-
-        Note: Version information is obtained from the firmware loader. Old
-        information is lost when firmware is updated.
-        Returns:
-            A dictionary of device version info. eg.
-            {
-                'Fw Build': '73',
-                'OTA Status': 'No OTA performed before this boot',
-            }
-
-        """
-        logging.debug('Inside get_version()...')
-        success, result = self._cmd('GetVer', throw_error=False)
-        status = {}
-        if result:
-            for line in result:
-                if isinstance(line, dict):
-                    status['build'] = line['vm_build_number']
-                    status['psoc_build'] = line['psoc_version']
-                    status['debug'] = line['csr_fw_debug_build']
-                    status['Fw Build Label'] = line['build_label']
-                    if 'last_ota_status' in line.keys():
-                        # Optional value in the proto response
-                        status['OTA Status'] = line['last_ota_status']
-                    else:
-                        status['OTA Status'] = 'No info'
-        return success, status
-
-    def get_earcon_version(self):
-        """Return a device Earson version information.
-
-        Returns:
-            Boolean:  True if success, False otherwise.
-            String: Earcon Version e.g. 7001 0201 6100 0000
-
-        """
-        # TODO(nviboonchan): Earcon version format would be changed in the
-        # future.
-        logging.debug('Inside get_earcon_version()...')
-        result = self.get_pskey('PSKEY_EARCON_VERSION')
-        if result:
-            return True, result
-        else:
-            return False, None
-
-    def get_bt_status(self):
-        """Return a device bluetooth connection information.
-
-        Returns:
-            A dictionary of bluetooth status. eg.
-            {
-                'Comp. App': 'FALSE',
-               'HFP (pri.)', 'FALSE',
-               'HFP (sec.)': 'FALSE',
-               'A2DP (pri.)': 'FALSE',
-               'A2DP (sec.)': 'FALSE',
-               'A2DP disconnects': '3',
-               'A2DP Role (pri.)': 'slave',
-               'A2DP RSSI (pri.)': '-Touch'
-            }
-        """
-        logging.debug('Inside get_bt_status()...')
-        return self._get_status('GetBTStatus')
-
-    def get_conn_devices(self):
-        """Gets the BT connected devices.
-
-        Returns:
-            A dictionary of BT connected devices. eg.
-            {
-                'HFP Pri': 'xxxx',
-                'HFP Sec': 'xxxx',
-                'A2DP Pri': 'xxxx',
-                'A2DP Sec': 'xxxx',
-                'RFCOMM devices': 'xxxx',
-                'CTRL': 'xxxx',
-                'AUDIO': 'None',
-                'DEBUG': 'None',
-                'TRANS': 'None'
-             }
-
-        Raises:
-            ResponseError: If unexpected response occurs.
-        """
-        response_regex = re.compile('[0-9]+ .+: ')
-        connected_status = {}
-        response = self.cmd('GetConnDevices')
-        if not response:
-            raise ResponseError(
-                'No response returned by GetConnDevices command')
-        for line in response[0]:
-            if response_regex.search(line):
-                profile, value = line[line.find(' '):].split(':', 1)
-                connected_status[profile] = value
-        if not connected_status:
-            raise ResponseError('No BT Profile Status in response.')
-        return connected_status
-
-    def _get_status(self, cmd):
-        """Return a device status information."""
-        status = {}
-        try:
-            results = self.cmd(cmd)
-        except DeviceError as ex:
-            # logging.exception('{} cmd failed'.format(cmd))
-            logging.warning('Failed to get device status info.')
-            raise ex
-        results = results[0]
-        for result in results:
-            match = re.match(STATUS_REGEX, result)
-            if match:
-                key = match.group('key')
-                value = match.group('value')
-                status.update({key: value})
-        return status
-
-    def is_streaming(self):
-        """Returns the music streaming status on Apollo.
-
-        Returns:
-            Boolean: True if device is streaming music. False otherwise.
-        """
-
-        status = self.cmd('GetDSPStatus')
-        if any('active feature mask: 0' in log for log in
-               status[0]):
-            return False
-        elif any('active feature mask: 2' in log for log in
-                 status[0]):
-            return True
-        else:
-            return False
-
-    def is_in_call(self):
-        """Returns the phone call status on Apollo.
-
-        Returns:
-            Boolean: True if device has incoming call. False otherwise.
-        """
-
-        status = self.cmd('GetDSPStatus')
-        if not any('Inc' in log or 'out' in log for log in status[0]):
-            return False
-        return True
-
-    def is_device_limbo(self):
-        """Check if device is in Limbo state.
-
-        Returns:
-            Boolean: True if device is in limbo state, False otherwise.
-        """
-        device_state = self.get_device_state()
-        logging.info('BudsDevice "{}" state {}'.format(self.serial_number,
-                                                       device_state))
-        return device_state == 'limbo'
-
-    def get_device_state(self):
-        """Get state of the device.
-
-        Returns:
-            String representing the device state.
-
-        Raises:
-            DeviceError: If command fails.
-        """
-        _, status = self._cmd('GetDSPStatus')
-        for stat in status:
-            if isinstance(stat, dict):
-                logging.info(stat)
-                return stat['sink_state'].lower()
-        raise DeviceError('BudsDevice state not found in GetDSPStatus.')
-
-    def set_stay_connected(self, value):
-        """Run command to set the value for SetAlwaysConnected.
-
-        Args:
-            value: (int) 1 to keep the connection engaged at all times,
-                         0 to restore the default behavior.
-        Returns:
-            the set state of type int (0 or 1) or None if not applicable
-        """
-
-        if int(self.version) >= 1663:
-            self._cmd('SetAlwaysConnected {}'.format(value))
-            logging.info('Setting sleep on idle to {}'.format(value))
-            return value
-
-    def get_codec(self):
-        """Get device's current audio codec.
-
-        Returns:
-            String representing the audio codec.
-
-        Raises:
-            DeviceError: If command fails.
-        """
-        success, status = self._cmd('get_codec')
-        logging.info('---------------------------------------')
-        logging.info(status)
-        logging.info('---------------------------------------')
-        if success:
-            for line in status:
-                if isinstance(line, dict):
-                    logging.info('Codec found: {}'.format(line['codec']))
-                    return line['codec']
-        raise DeviceError('BudsDevice codec not found in get_codec.')
-
-    def crash_dump_detection(self):
-        """Reads crash dump determines if a crash is detected.
-
-        Returns:
-            True if crash detection is supported and if a new crash is found.
-            False otherwise.
-        """
-        # Detects if crashdump output is new
-        new_crash_regex = r'new crash = ([01]+)'
-        # filter crashdump for just the trace
-        crash_stack_regex = r'BASIC(.*)\n[\d]+ APP_STACK(.*)\n'
-        # remove time stamp commander output
-        timestamp_remover_regex = '\n[\\d]+ '
-
-        logging.debug('Inside IsCrashDumpDetection()...')
-        cmd_return = self.cmd('CrashDump', wait=1)
-        crash_dump_str = '\n'.join(cmd_return[0])
-        logging.info(crash_dump_str)
-        try:
-            # check for crash
-            match = re.search(new_crash_regex, crash_dump_str)
-            if match is not None:
-                if match.groups()[0] == '1':  # new crash found
-                    logging.error('Crash detected!!')
-                    basic, app_stack = re.search(crash_stack_regex,
-                                                 crash_dump_str,
-                                                 re.DOTALL).groups()
-                    # remove time stamps from capture
-                    basic = re.sub(timestamp_remover_regex, '', basic)
-                    app_stack = re.sub(timestamp_remover_regex, '', app_stack)
-                    # write to log
-                    # pylint: disable=bad-whitespace
-                    logging.info(
-                        '\n&270d = %s\n&270e = %s\n' % (basic, app_stack))
-                    # pylint: enable=bad-whitespace
-                    return True
-                else:  # no new crash
-                    logging.info('No crash detected')
-                    return False
-        except AttributeError:
-            logging.exception(
-                'Apollo crash dump output is not in expected format')
-            raise DeviceError('Apollo crash dump not in expected format')
-
-    @property
-    def version(self):
-        """Application version.
-
-        Returns:
-            (String) Firmware version.
-        """
-        _, result = self.get_version()
-        return result['build']
-
-    @property
-    def bluetooth_address(self):
-        """Bluetooth MAC address.
-
-        Returns:
-            a string representing 48bit BT MAC address in Hex.
-
-        Raises:
-            DeviceError: Unable to find BT Address
-        """
-        results = self.get_pskey('PSKEY_BDADDR')
-        if not results:
-            raise DeviceError('Unable to find BT Address')
-        logging.info(results)
-        # Bluetooth lower address part, upper address part and non-significant
-        # address part.
-        bt_lap = results[2:8]
-        bt_uap = results[10:12]
-        bt_nap = results[12:16]
-        results = bt_nap + bt_uap + bt_lap
-
-        return ':'.join(map(''.join, zip(*[iter(results)] * 2))).upper()
-
-    @property
-    def device_name(self):
-        """Device Friendly Name.
-
-        Returns:
-            a string representing device friendly name.
-
-        Raises:
-            DeviceError: Unable to find a wearable device name.
-        """
-        result = self.get_pskey('PSKEY_DEVICE_NAME')
-        if not result:
-            raise DeviceError('Unable to find BudsDevice Name')
-        logging.info(_to_ascii(result))
-        return _to_ascii(result)
-
-    @property
-    def stay_connected(self):
-        return self.stay_connected_state
-
-    @stay_connected.setter
-    def stay_connected(self, value):
-        self.stay_connected_state = self.set_stay_connected(value)
-
-    def read_serial_port(self, read_until=None):
-        """Read serial port until specified read_until value in seconds."""
-        # If a read_until duration is given, wait that long before reading.
-        if read_until:
-            time.sleep(read_until)
-        res = self.connection_handle.read()
-        buf_read = []
-        for line in res:
-            if apollo_log_decoder.is_automation_protobuf(line):
-                decoded = apollo_log_decoder.decode(line)
-                buf_read.append(decoded)
-            else:
-                buf_read.append(line)
-        return buf_read
-
-    def wait_for_reset(self, timeout=30):
-        """waits for the device to reset by check serial enumeration.
-
-        Checks every .5 seconds for the port.
-
-        Args:
-            timeout: The max time to wait for the device to disappear.
-
-        Returns:
-            Bool: True if the device reset was detected. False if not.
-        """
-        start_time = time.time()
-        while True:
-            res = subprocess.Popen(['ls', self.commander_port],
-                                   stdout=subprocess.PIPE,
-                                   stderr=subprocess.PIPE)
-            res.communicate()
-            if res.returncode != 0:
-                logging.info('BudsDevice reset detected')
-                return True
-            elif (time.time() - start_time) > timeout:
-                logging.info('Timeout waiting for device to reset.....')
-                return False
-            else:
-                time.sleep(.5)
-
-    def set_in_case(self, reconnect=True):
-        """Simulates setting apollo in case and wait for device to come up.
-
-        Args:
-            reconnect: bool - if method should block until reconnect
-        """
-        logging.info('Setting device in case')
-        out = self.send('Pow 2')
-        for i in out:
-            if 'No OTA wakeup condition' in i:
-                logging.info('No wake up condition.')
-            elif 'STM Wakeup 10s' in i:
-                logging.info('Wake up condition detected.')
-        if reconnect:
-            self.wait_for_reset()
-            self.reconnect()
-
-
-class ParentDevice(BudsDevice):
-    """Wrapper object for Device that addresses b10 recovery and build flashing.
-
-    Recovery mechanism:
-    In case a serial connection could not be established to b10, the recovery
-    mechanism is activated ONLY if 'recover_device' is set to 'true' and
-    'b29_serial' is defined in the config file. This helps recover a device
-    that has a bad build installed.
-    """
-
-    def __init__(self, serial_number, recover_device=False, b29_serial=None):
-        # If the recover_device parameter is supplied and instantiating B10
-        # fails, try to recover the device. Instantiating b10 has to fail at
-        # most $tries_before_recovery times before initiating a recovery, and
-        # the recovery runs at most $recovery_tries times before raising Error.
-        # After the first failed recovery attempt, reset b29 on each iteration.
-        self.b29_device = None
-        if recover_device:
-            if b29_serial is None:
-                logging.error('B29 serial not defined')
-                raise Error(
-                    'Recovery failed because "b29_serial" definition not '
-                    'present in device manifest file')
-            else:
-                self.b29_device = B29Device(b29_serial)
-            tries_before_recovery = 5
-            recovery_tries = 5
-            for attempt in range(tries_before_recovery):
-                try:
-                    # Build crash symptoms vary based on the nature of the
-                    # crash. ConnectError is thrown if the device never shows
-                    # up in /dev/. Sometimes the device shows up and can
-                    # connect, but sending commands fails or crashes apollo;
-                    # in that case, DeviceError is thrown.
-                    super().__init__(serial_number, commander_port=None,
-                                     log_port=None, serial_logger=None)
-                    break
-                except (ConnectError, DeviceError) as ex:
-                    logging.warning(
-                        'Error initializing apollo object - # of attempt '
-                        'left : %d' % (tries_before_recovery - attempt - 1))
-                    if attempt + 1 >= tries_before_recovery:
-                        logging.error(
-                            'Retries exhausted - now attempting to restore '
-                            'golden image')
-                        for recovery_attempt in range(recovery_tries):
-                            if not self.b29_device.restore_golden_image():
-                                logging.error('Recovery failed - retrying...')
-                                self.b29_device.reset_charger()
-                                continue
-                            # try to instantiate now
-                            try:
-                                super().__init__(serial_number,
-                                                 commander_port=None,
-                                                 log_port=None,
-                                                 serial_logger=None)
-                                break
-                            except (ConnectError, DeviceError):
-                                if recovery_attempt == recovery_tries - 1:
-                                    raise Error(
-                                        'Recovery failed - ensure that there '
-                                        'is no mismatching serial numbers of '
-                                        'b29 and b10 is specified in config')
-                                else:
-                                    logging.warning(
-                                        'Recovery attempt failed - retrying...')
-                    time.sleep(2)
-        else:
-            super().__init__(serial_number, commander_port=None, log_port=None,
-                             serial_logger=None)
-        # set this to prevent sleep
-        self.set_stay_connected(1)
-
-    def get_info(self):
-        information_dictionary = {}
-        information_dictionary['type'] = self.dut_type
-        information_dictionary['serial'] = self.serial_number
-        information_dictionary['log port'] = self.log_port
-        information_dictionary['command port'] = self.commander_port
-        information_dictionary['bluetooth address'] = self.bluetooth_address
-        success, build_dict = self.get_version()
-        information_dictionary['build'] = build_dict
-        # Extract the build number as a separate key. Useful for BigQuery.
-        information_dictionary['firmware build number'] = build_dict.get(
-            'build', '9999')
-        information_dictionary['name'] = self.device_name
-        if self.b29_device:
-            information_dictionary['b29 serial'] = self.b29_device.serial
-            information_dictionary['b29 firmware'] = self.b29_device.fw_version
-            information_dictionary['b29 commander port'] = self.b29_device.port
-            information_dictionary[
-                'b29 app version'] = self.b29_device.app_version
-        return information_dictionary
-
-    def setup(self, **kwargs):
-        """
-
-        Args:
-            apollo_build: if specified, will be used in flashing the device to
-                          that build prior to running any of the tests. If not
-                          specified flashing is skipped.
-        """
-        if 'apollo_build' in kwargs and kwargs['apollo_build'] is not None:
-            build = kwargs['apollo_build']
-            X20_REGEX = re.compile(r'/google/data/')
-            if not os.path.exists(build) or os.stat(build).st_size == 0:
-                # if x20 path, retry on file-not-found error or if file size is
-                # zero b/c X20 path does not update immediately
-                if X20_REGEX.match(build):
-                    for i in range(20):
-                        # wait until file exists and size is > 0 w/ 6 second
-                        # interval on retry
-                        if os.path.exists(build) and os.stat(build).st_size > 0:
-                            break
-
-                        if i == 19:
-                            logging.error('Build path (%s) does not exist or '
-                                          'file size is 0 - aborted' % build)
-
-                            raise Error('Specified build path (%s) does not '
-                                        'exist or file size is 0' % build)
-                        else:
-                            logging.warning('Build path (%s) does not exist or '
-                                            'file size is 0 - retrying...' %
-                                            build)
-                            time.sleep(6)
-                else:
-                    raise Error('Specified build path (%s) does not exist or '
-                                'file size is 0' % build)
-            self.flash_from_file(file_name=build, reconnect=True)
-        else:
-            logging.info('Not flashing apollo.')
-
-    def teardown(self, **kwargs):
-        self.close()
-
-
-def _evt_hex(cmd):
-    return 'EvtHex ' + apollo_sink_events.SINK_EVENTS[cmd]
-
-
-def _to_ascii(orig):
-    # Returned value needs to be byte-swapped. Remove the last octet if it is 0.
-    result = _byte_swap(orig)
-    result = result[:-2] if result[-2:] == '00' else result
-    return bytearray.fromhex(result).decode()
-
-
-def _byte_swap(orig):
-    """Simple function to swap bytes order.
-
-    Args:
-        orig: original string
-
-    Returns:
-        a string with bytes swapped.
-        eg. orig = '6557276920736952006f'.
-        After swap, return '57656927732052696f00'
-    """
-    return ''.join(
-        sum([(c, d, a, b) for a, b, c, d in zip(*[iter(orig)] * 4)], ()))
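For reference, a minimal standalone sketch (not part of the deleted module; names are illustrative) of the byte-swap behavior documented above: within every 16-bit word the two bytes are exchanged, and a trailing 0x00 pad is dropped before ASCII decoding.

def byte_swap(orig: str) -> str:
    # Group the hex string into 4-character words (a, b, c, d) and emit (c, d, a, b).
    return ''.join(
        sum([(c, d, a, b) for a, b, c, d in zip(*[iter(orig)] * 4)], ()))

def to_ascii(orig: str) -> str:
    swapped = byte_swap(orig)
    swapped = swapped[:-2] if swapped.endswith('00') else swapped
    return bytearray.fromhex(swapped).decode()

# The example from the docstring above.
assert byte_swap('6557276920736952006f') == '57656927732052696f00'
assert to_ascii('6557276920736952006f') == "Wei's Rio"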
diff --git a/src/antlion/controllers/buds_lib/apollo_utils.py b/src/antlion/controllers/buds_lib/apollo_utils.py
deleted file mode 100644
index 98c9be8..0000000
--- a/src/antlion/controllers/buds_lib/apollo_utils.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from antlion import signals
-from antlion.controllers.buds_lib import apollo_lib
-
-AVRCP_WAIT_TIME = 3
-
-
-def get_serial_object(pri_ad, serial_device):
-    """This function will creates object for serial device connected.
-
-    Args:
-        pri_ad: Android device.
-        serial_device: serial device connected.
-
-    Returns:
-        The serial device object; otherwise aborts the test class.
-    """
-    devices = apollo_lib.get_devices()
-    for device in devices:
-        if device['serial_number'] in serial_device:
-            return apollo_lib.BudsDevice(device['serial_number'])
-    pri_ad.log.error('Apollo device not found')
-    raise signals.TestAbortAll('Apollo device not found')
-
-
-def avrcp_actions(pri_ad, buds_device):
-    """Performs avrcp controls like volume up, volume down
-
-    Args:
-        pri_ad: Android device.
-        buds_device: serial device object to perform avrcp actions.
-
-    Returns:
-        True if successful; otherwise raises an exception.
-    """
-    pri_ad.log.debug("Setting volume to 0")
-    pri_ad.droid.setMediaVolume(0)
-    current_volume = pri_ad.droid.getMediaVolume()
-    pri_ad.log.info('Current volume is {}'.format(current_volume))
-    for _ in range(5):
-        buds_device.volume('Up')
-        time.sleep(AVRCP_WAIT_TIME)
-    pri_ad.log.info('Volume increased to {}'.format(
-        pri_ad.droid.getMediaVolume()))
-    if current_volume == pri_ad.droid.getMediaVolume():
-        pri_ad.log.error('Increase volume failed')
-        raise signals.TestFailure("Increase volume failed")
-    current_volume = pri_ad.droid.getMediaVolume()
-    for _ in range(5):
-        buds_device.volume('Down')
-        time.sleep(AVRCP_WAIT_TIME)
-    pri_ad.log.info('Volume decreased to {}'.format(
-        pri_ad.droid.getMediaVolume()))
-    if current_volume == pri_ad.droid.getMediaVolume():
-        pri_ad.log.error('Decrease volume failed')
-        raise signals.TestFailure("Decrease volume failed")
-    return True
diff --git a/src/antlion/controllers/buds_lib/b29_lib.py b/src/antlion/controllers/buds_lib/b29_lib.py
deleted file mode 100644
index 32839c1..0000000
--- a/src/antlion/controllers/buds_lib/b29_lib.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Class definition of B29 device for controlling the device.
-
-B29 is an engineering device with serial capabilities. It is almost like
-b20, except it has additional features that allow sending commands to b10
-and pulling logs from b10 via one-wire.
-
-Please see https://docs.google.com/document/d/17yJeJRNWxv5E9
-fBvw0sXkgwCBkshU_l4SxWkKgAxVmk/edit for details about available operations.
-"""
-
-import os
-import re
-import time
-from logging import Logger
-
-from antlion import utils
-from antlion.controllers.buds_lib import tako_trace_logger
-
-logging = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-DEVICE_REGEX = (
-    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)\s->\s'
-    r'(\.\./){2}(?P<port>\w+)'
-)
-# TODO: automate getting the latest version from x20
-DEBUG_BRIDGE = ('/google/data/ro/teams/wearables/apollo/ota/jenkins-presubmit/'
-                'ovyalov/master/apollo-sw/CL14060_v2-build13686/v13686/'
-                'automation/apollo_debug_bridge/linux2/apollo_debug_bridge')
-B29_CHIP = 'Cypress_Semiconductor_USBUART'
-
-
-# TODO:
-# as the need arises, additional functionalities of debug_bridge should be
-# integrated
-# TODO:
-# https://docs.google.com/document/d/17yJeJRNWxv5E9fBvw0sXkgwCBkshU_
-# l4SxWkKgAxVmk/edit
-
-class B29Error(Exception):
-    """Module Level Error."""
-
-
-def get_b29_devices():
-    """ Get all available B29 devices.
-
-    Returns:
-      (list) A list of available devices (ex: ['/dev/ttyACM4',...]) or empty
-      list if none found
-    """
-    devices = []
-    result = os.popen('ls -l /dev/serial/by-id/*%s*' % B29_CHIP).read()
-    for line in result.splitlines():
-        match = re.search(DEVICE_REGEX, line)
-        device_serial = match.group('device_serial')
-        log_port = None
-        commander_port = '/dev/' + match.group('port')
-        device = {
-            'commander_port': commander_port,
-            'log_port': log_port,
-            'serial_number': device_serial
-        }
-        devices.append(device)
-    return devices
-
-
-class B29Device(object):
-    """Class to control B29 device."""
-
-    def __init__(self, b29_serial):
-        """ Class to control B29 device
-        Args: b29_serial: serial number string (ex: 'D96045152F121B00')
-        """
-        self.serial = b29_serial
-        b29_port = [d['commander_port'] for d in get_b29_devices() if
-                    d['serial_number'] == b29_serial]
-        if not b29_port:
-            logging.error("unable to find b29 with serial number %s" %
-                          b29_serial)
-            raise B29Error(
-                "Recovery failed because b29_serial specified in device "
-                "manifest file is not found or invalid")
-        self.port = b29_port[0]
-        self.ping_match = {'psoc': r'Pings: tx=[\d]* rx=[1-9][0-9]',
-                           'csr': r'count=100, sent=[\d]*, received=[1-9][0-9]',
-                           'charger': r'Pings: tx=[\d]* rx=[1-9][0-9]'}
-        self.fw_version = self._get_version('fw')
-        self.app_version = self._get_version('app')
-
-    def _get_version(self, type='fw'):
-        """ Method to get version of B29
-        Returns:
-            String version if found (ex: '0006'), None otherwise
-        """
-        command = '--serial={}'.format(self.port)
-        debug_bridge_process = self._send_command(command=command)
-        if type == 'fw':
-            version_match = re.compile(r'CHARGER app version: version=([\d]*)')
-        elif type == 'app':
-            version_match = re.compile(r'APP VERSION: ([\d]*)')
-        version_str = self._parse_output_of_running_process(
-            debug_bridge_process, version_match)
-        debug_bridge_process.kill()
-        if version_str:
-            match = version_match.search(version_str)
-            version = match.groups()[0]
-            return version
-        return None
-
-    def _parse_output_of_running_process(self, subprocess, match, timeout=30):
-        """ Parses the logs from subprocess objects and checks to see if a
-        match is found within the allotted time
-        Args:
-            subprocess: object returned by _send_command (which is the same as
-            bject returned by subprocess.Popen()) match: regex match object
-            (what is returned by re.compile(r'<regex>') timeout: int - time to
-            keep retrying before bailing
-
-        """
-        start_time = time.time()
-        success_match = re.compile(match)
-        while start_time + timeout > time.time():
-            out = subprocess.stderr.readline()
-            if success_match.search(out):
-                return out
-            time.sleep(.5)
-        return False
-
-    def _send_command(self, command):
-        """ Send command to b29 using apollo debug bridge
-        Args:
-          command: The command for apollo debug to execute
-        Returns:
-          subprocess object
-        """
-        return utils.start_standing_subprocess(
-            '{} {} {}'.format(DEBUG_BRIDGE, '--rpc_port=-1', command),
-            shell=True)
-
-    def restore_golden_image(self):
-        """ Start a subprocess that calls the debug-bridge executable with
-        options that restores golden image of b10 attached to the b29. The
-        recovery restores the 'golden image' which is available in b10 partition
-         8. The process runs for 120 seconds which is adequate time for the
-         recovery to have completed.
-        """
-        # TODO:
-        # because we are accessing x20, we need to capture error resulting from
-        #  expired prodaccess and report it explicitly
-        # TODO:
-        # possibly file not found error?
-
-        # start the process, wait for two minutes and kill it
-        logging.info('Restoring golden image...')
-        command = '--serial=%s --debug_spi=dfu --sqif_partition=8' % self.port
-        debug_bridge_process = self._send_command(command=command)
-        success_match = re.compile('DFU on partition #8 successfully initiated')
-        if self._parse_output_of_running_process(debug_bridge_process,
-                                                 success_match):
-            logging.info('Golden image restored successfully')
-            debug_bridge_process.kill()
-            return True
-        logging.warning('Failed to restore golden image')
-        debug_bridge_process.kill()
-        return False
-
-    def ping_component(self, component, timeout=30):
-        """ Send ping to the specified component via B290
-        Args:
-            component = 'csr' or 'psoc' or 'charger'
-        Returns:
-            True if successful and False otherwise
-        """
-        if component not in ('csr', 'psoc', 'charger'):
-            raise B29Error('specified parameter for component is not valid')
-        logging.info('Pinging %s via B29...' % component)
-        command = '--serial={} --ping={}'.format(self.port, component)
-        debug_bridge_process = self._send_command(command=command)
-        if self._parse_output_of_running_process(debug_bridge_process,
-                                                 self.ping_match[component],
-                                                 timeout):
-            logging.info('Ping passes')
-            debug_bridge_process.kill()
-            return True
-        else:
-            logging.warning('Ping failed')
-            debug_bridge_process.kill()
-            return False
-
-    def reset_charger(self):
-        """ Send reset command to B29
-        Raises: TimeoutError (lib.utils.TimeoutError) if the device does not
-        come back within 120 seconds
-        """
-        # --charger_reset
-        if int(self.fw_version) >= 6:
-            logging.info('Resetting B29')
-            command = '--serial={} --charger_reset'.format(self.port)
-            reset_charger_process = self._send_command(command=command)
-            time.sleep(2)
-            reset_charger_process.kill()
-            logging.info('Waiting for B29 to become available..')
-            utils.wait_until(lambda: self.ping_component('charger'), 120)
-        else:
-            logging.warning('B29 firmware version %s does not support the '
-                            'charger_reset argument' % self.fw_version)
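As a quick reference, a self-contained sketch of how a DEVICE_REGEX-style pattern pulls the serial number and tty port out of an `ls -l /dev/serial/by-id/*` symlink listing. The sample line is hypothetical; the serial number reuses the docstring example.

import re

DEVICE_REGEX = (
    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)\s->\s'
    r'(\.\./){2}(?P<port>\w+)'
)

# Hypothetical `ls -l` output line for a Cypress USBUART serial adapter.
sample = ('lrwxrwxrwx 1 root root 13 Jan  1 00:00 '
          'usb-Cypress_Semiconductor_USBUART_D96045152F121B00-if00 '
          '-> ../../ttyACM4')

match = re.search(DEVICE_REGEX, sample)
assert match is not None
assert match.group('device_serial') == 'D96045152F121B00'
assert '/dev/' + match.group('port') == '/dev/ttyACM4'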
diff --git a/src/antlion/controllers/buds_lib/dev_utils/__init__.py b/src/antlion/controllers/buds_lib/dev_utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py
deleted file mode 100644
index fafb05a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Decodes the protobufs described in go/apollo-qa-tracing-design."""
-
-import base64
-import binascii
-import struct
-
-from antlion.controllers.buds_lib.dev_utils.proto.gen import apollo_qa_pb2
-from antlion.controllers.buds_lib.dev_utils.proto.gen import audiowear_pb2
-
-
-def to_dictionary(proto):
-    proto_dic = {}
-    msg = [element.split(':') for element in str(proto).split('\n') if element]
-    for element in msg:
-        key = element[0].strip()
-        value = element[1].strip()
-        proto_dic[key] = value
-    return proto_dic
-
-
-def is_automation_protobuf(logline):
-    return logline.startswith('QA_MSG|')
-
-
-def decode(logline):
-    """Decode the logline.
-
-    Args:
-      logline: String line with the encoded message.
-
-    Returns:
-      String value with the decoded message.
-    """
-    decoded = None
-    decoders = {'HEX': binascii.unhexlify, 'B64': base64.decodebytes}
-    msgs = {
-        apollo_qa_pb2.TRACE:
-            apollo_qa_pb2.ApolloQATrace,
-        apollo_qa_pb2.GET_VER_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetVerResponse,
-        apollo_qa_pb2.GET_CODEC_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetCodecResponse,
-        apollo_qa_pb2.GET_DSP_STATUS_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetDspStatusResponse,
-    }
-
-    if is_automation_protobuf(logline):
-        _, encoding, message = logline.split("|", 2)
-        message = message.rstrip()
-        if encoding in decoders.keys():
-            message = decoders[encoding](message)
-            header = message[0:4]
-            serialized = message[4:]
-            if len(header) == 4 and len(serialized) == len(message) - 4:
-                msg_group, msg_type, msg_len = struct.unpack('>BBH', header)
-                if (len(serialized) == msg_len and
-                        msg_group == audiowear_pb2.APOLLO_QA):
-                    proto = msgs[msg_type]()
-                    proto.ParseFromString(serialized)
-                    decoded = to_dictionary(proto)
-    return decoded
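For orientation, a hedged standalone sketch of the QA_MSG framing the decoder above relies on, without the generated proto modules (the function name is illustrative): each line is 'QA_MSG|<encoding>|<payload>', and the decoded bytes carry a 4-byte big-endian header (message group, message type, payload length) followed by the serialized protobuf.

import base64
import binascii
import struct

def split_frame(logline: str):
    # Returns (msg_group, msg_type, serialized_proto_bytes), or None if the
    # line is not an automation protobuf.
    if not logline.startswith('QA_MSG|'):
        return None
    _, encoding, message = logline.split('|', 2)
    decoders = {'HEX': binascii.unhexlify, 'B64': base64.decodebytes}
    raw = decoders[encoding](message.rstrip().encode())
    msg_group, msg_type, msg_len = struct.unpack('>BBH', raw[:4])
    serialized = raw[4:]
    assert len(serialized) == msg_len
    return msg_group, msg_type, serialized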
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py
deleted file mode 100644
index b4dd58a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo's event logs regexp for each button action."""
-
-EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)\r\n')
-VOL_CHANGE_REGEX = (
-  r'(?P<time_stamp>\d+)\sVolume = (?P<vol_level>\d+)(.*)\r\n')
-VOLUP_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)3202(.*)\r\n')
-VOLDOWN_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)320a(.*)\r\n')
-AVRCP_PLAY_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
-                    r'play\r\n')
-AVRCP_PAUSE_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
-                     r'paused\r\n')
-MIC_OPEN_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3206\](.*)\r\n')
-MIC_CLOSE_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3207\](.*)\r\n')
-PREV_TRACK_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3208\](.*)\r\n')
-PREV_CHANNEL_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3209\](.*)\r\n')
-NEXT_TRACK_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3200\](.*)\r\n')
-NEXT_CHANNEL_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3201\](.*)\r\n')
-FETCH_NOTIFICATION_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3205\](.*)\r\n')
-VOICE_CMD_COMPLETE_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspOnVoiceCommandComplete\r\n')
-VOICE_CMD_START_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspStartVoiceCommand(.*)\r\n')
-MIC_OPEN_PROMT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 33(.*)\r\n')
-MIC_CLOSE_PROMT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 34(.*)\r\n')
-POWER_ON_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) --hello--(.*)PowerOn(.*)\r\n')
-POWER_OFF_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) EvtAW:320d(.*)\r\n')
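A small usage sketch for these patterns (the log line below is a hypothetical sample in the expected format), showing how the named groups are extracted:

import re

VOL_CHANGE_REGEX = (
    r'(?P<time_stamp>\d+)\sVolume = (?P<vol_level>\d+)(.*)\r\n')

# Hypothetical Apollo event-log line.
line = '123456 Volume = 7 EvtAW\r\n'
match = re.search(VOL_CHANGE_REGEX, line)
assert match is not None
assert match.group('time_stamp') == '123456'
assert match.group('vol_level') == '7'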
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py
deleted file mode 100644
index fcba1fd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo PS Keys and User Sink Events."""
-
-# Persistent Store (PS) Keys from rio_all_merged.psr.
-
-PSKEY = {
-    'PSKEY_BDADDR': '0x0001',
-    'PSKEY_DEVICE_NAME': '0x0108',
-    'PSKEY_DEEP_SLEEP_STATE': '0x0229',
-    'PSKEY_USB_VERSION': '0x02bc',
-    'PSKEY_USB_DEVICE_CLASS_CODES': '0x02bd',
-    'PSKEY_USB_VENDOR_ID': '0x02be',
-    'PSKEY_USB_PRODUCT_ID': '0x02bf',
-    'PSKEY_USB_PRODUCT_STRING': '0x02c2',
-    'PSKEY_USB_SERIAL_NUMBER_STRING': '0x02c3',
-    'PSKEY_EARCON_VERSION': '0x28b'
-}
-
-# Rio logging features from rio_log.h.
-
-LOG_FEATURES = {
-    'ALL': '0xffff',
-    'VERBOSE': '0x0001',
-    'TEST': '0x0002',
-    'CSR': '0x0004',
-    'DEBUG': '0x0008',
-    'INFO': '0x0010',
-    'ERROR': '0x0020',
-    'TIME_STAMP': '0x0040',
-}
-
-# Supported events from sink_events.h.
-
-SINK_EVENTS = {
-    'EventUsrMicOpen': '0x3206',
-    'EventUsrMicClose': '0x3207',
-    'EventUsrPowerOn': '0x4001',
-    'EventUsrPowerOff': '0x4002',
-    'EventUsrEnterPairing': '0x4003',
-    'EventUsrInitateVoiceDial': '0x4004',
-    'EventUsrInitateVoiceDial_AG2': '0x4005',
-    'EventUsrLastNumberRedial': '0x4006',
-    'EventUsrLastNumberRedial_AG2': '0x4007',
-    'EventUsrAnswer': '0x4008',
-    'EventUsrReject': '0x4009',
-    'EventUsrCancelEnd': '0x400A',
-    'EventUsrTransferToggle': '0x400B',
-    'EventUsrMuteToggle': '0x400C',
-    'EventUsrVolumeUp': '0x400D',
-    'EventUsrVolumeDown': '0x400E',
-    'EventUsrVolumeToggle': '0x400F',
-    'EventUsrThreeWayReleaseAllHeld': '0x4010',
-    'EventUsrThreeWayAcceptWaitingReleaseActive': '0x4011',
-    'EventUsrThreeWayAcceptWaitingHoldActive': '0x4012',
-    'EventUsrThreeWayAddHeldTo3Way': '0x4013',
-    'EventUsrThreeWayConnect2Disconnect': '0x4014',
-    'EventUsrLedsOnOffToggle': '0x4015',
-    'EventUsrLedsOn': '0x4016',
-    'EventUsrLedsOff': '0x4017',
-    'EventUsrEstablishSLC': '0x4018',
-    'EventUsrMuteOn': '0x4019',
-    'EventUsrMuteOff': '0x401A',
-    'EventUsrEnterTXContTestMode': '0x401B',
-    'EventUsrEnterDUTState': '0x401C',
-    'EventUsrBassBoostEnableDisableToggle': '0x401D',
-    'EventUsrPlaceIncomingCallOnHold': '0x401E',
-    'EventUsrAcceptHeldIncomingCall': '0x401F',
-    'EventUsrRejectHeldIncomingCall': '0x4020',
-    'EventUsrEnterDFUMode': '0x4021',
-    'EventUsrEnterDriverlessDFUMode': '0x4022',
-    'EventUsrEnterServiceMode': '0x4023',
-    'EventUsrAudioPromptsOn': '0x4024',
-    'EventUsrAudioPromptsOff': '0x4025',
-    'EventUsrDialStoredNumber': '0x4026',
-    'EventUsrUpdateStoredNumber': '0x4027',
-    'EventUsrRestoreDefaults': '0x4028',
-    'EventUsrConfirmationAccept': '0x4029',
-    'EventUsrConfirmationReject': '0x402A',
-    'EventUsrSelectAudioPromptLanguageMode': '0x402B',
-    'EventUsrSwitchAudioMode': '0x402F',
-    'EventUsrButtonLockingOn': '0x4030',
-    'EventUsrButtonLockingOff': '0x4031',
-    'EventUsrButtonLockingToggle': '0x4032',
-    'EventUsrRssiPair': '0x4034',
-    'EventUsrBassBoostOn': '0x4035',
-    'EventUsrBassBoostOff': '0x4036',
-    'EventUsr3DEnhancementOn': '0x4037',
-    'EventUsr3DEnhancementOff': '0x4038',
-    'EventUsrSelectAudioSourceNext': '0x4039',
-    'EventUsrSelectAudioSourceAnalog': '0x403A',
-    'EventUsrSelectAudioSourceUSB': '0x403B',
-    'EventUsrSelectAudioSourceAG1': '0x403C',
-    'EventUsrSelectAudioSourceAG2': '0x403D',
-    'EventUsrSelectFMAudioSource': '0x403E',
-    'EventUsrSelectAudioSourceNone': '0x403F',
-    'EventUsrPbapDialIch': '0x4040',
-    'EventUsrPbapDialMch': '0x4041',
-    'EventUsrIntelligentPowerManagementOn': '0x4042',
-    'EventUsrIntelligentPowerManagementOff': '0x4043',
-    'EventUsrIntelligentPowerManagementToggle': '0x4044',
-    'EventUsrAvrcpPlayPause': '0x4045',
-    'EventUsrAvrcpStop': '0x4046',
-    'EventUsrAvrcpSkipForward': '0x4047',
-    'EventUsrAvrcpSkipBackward': '0x4048',
-    'EventUsrAvrcpFastForwardPress': '0x4049',
-    'EventUsrAvrcpFastForwardRelease': '0x404A',
-    'EventUsrAvrcpRewindPress': '0x404B',
-    'EventUsrAvrcpRewindRelease': '0x404C',
-    'EventUsrAvrcpShuffleOff': '0x404D',
-    'EventUsrAvrcpShuffleAllTrack': '0x404E',
-    'EventUsrAvrcpShuffleGroup': '0x404F',
-    'EventUsrAvrcpRepeatOff': '0x4050',
-    'EventUsrAvrcpRepeatSingleTrack': '0x4051',
-    'EventUsrAvrcpRepeatAllTrack': '0x4052',
-    'EventUsrAvrcpRepeatGroup': '0x4053',
-    'EventUsrAvrcpPlay': '0x4054',
-    'EventUsrAvrcpPause': '0x4055',
-    'EventUsrAvrcpToggleActive': '0x4056',
-    'EventUsrAvrcpNextGroupPress': '0x4057',
-    'EventUsrAvrcpPreviousGroupPress': '0x4058',
-    'EventUsrPbapSetPhonebook': '0x4059',
-    'EventUsrPbapBrowseEntry': '0x405A',
-    'EventUsrPbapBrowseList': '0x405B',
-    'EventUsrPbapDownloadPhonebook': '0x405C',
-    'EventUsrPbapSelectPhonebookObject': '0x405D',
-    'EventUsrPbapBrowseComplete': '0x405E',
-    'EventUsrPbapGetPhonebookSize': '0x405F',
-    'EventUsrUsbPlayPause': '0x4060',
-    'EventUsrUsbStop': '0x4061',
-    'EventUsrUsbFwd': '0x4062',
-    'EventUsrUsbBack': '0x4063',
-    'EventUsrUsbMute': '0x4064',
-    'EventUsrUsbLowPowerMode': '0x4065',
-    'EventUsrTestModeAudio': '0x4066',
-    'EventUsrTestModeTone': '0x4067',
-    'EventUsrTestModeKey': '0x4068',
-    'EventUsrTestDefrag': '0x4069',
-    'EventUsrDebugKeysToggle': '0x406A',
-    'EventUsrSpeechRecognitionTuningStart': '0x406B',
-    'EventUsrWbsTestSetCodecs': '0x406C',
-    'EventUsrWbsTestOverrideResponse': '0x406D',
-    'EventUsrWbsTestSetCodecsSendBAC': '0x406E',
-    'EventUsrCreateAudioConnection': '0x406F',
-    'EventUsrSwapA2dpMediaChannel': '0x407F',
-    'EventUsrExternalMicConnected': '0x4080',
-    'EventUsrExternalMicDisconnected': '0x4081',
-    'EventUsrSSROn': '0x4082',
-    'EventUsrSSROff': '0x4083',
-    'EventUsrPeerSessionInquire': '0x4089',
-    'EventUsrPeerSessionConnDisc': '0x408A',
-    'EventUsrPeerSessionEnd': '0x408B',
-    'EventUsrBatteryLevelRequest': '0x408C',
-    'EventUsrVolumeOrientationNormal': '0x408D',
-    'EventUsrVolumeOrientationInvert': '0x408E',
-    'EventUsrResetPairedDeviceList': '0x408F',
-    'EventUsrEnterDutMode': '0x4090',
-    'EventUsr3DEnhancementEnableDisableToggle': '0x4091',
-    'EventUsrRCVolumeUp': '0x4092',
-    'EventUsrRCVolumeDown': '0x4093',
-    'EventUsrEnterBootMode2': '0x4094',
-    'EventUsrChargerConnected': '0x4095',
-    'EventUsrChargerDisconnected': '0x4096',
-    'EventUsrSubwooferDisconnect': '0x4097',
-    'EventUsrAnalogAudioConnected': '0x4098',
-    'EventUsrAnalogAudioDisconnected': '0x4099',
-    'EventUsrGaiaDFURequest': '0x409A',
-    'EventUsrStartIRLearningMode': '0x409B',
-    'EventUsrStopIRLearningMode': '0x409C',
-    'EventUsrClearIRCodes': '0x409D',
-    'EventUsrUserEqOn': '0x409E',
-    'EventUsrUserEqOff': '0x409F',
-    'EventUsrUserEqOnOffToggle': '0x40A0',
-    'EventUsrSpdifAudioConnected': '0x40AD',
-    'EventUsrSpdifAudioDisconnected': '0x40AE',
-    'EventUsrSelectAudioSourceSpdif': '0x40AF',
-    'EventUsrChangeAudioRouting': '0x40B0',
-    'EventUsrMasterDeviceTrimVolumeUp': '0x40B1',
-    'EventUsrMasterDeviceTrimVolumeDown': '0x40B2',
-    'EventUsrSlaveDeviceTrimVolumeUp': '0x40B3',
-    'EventUsrSlaveDeviceTrimVolumeDown': '0x40B4',
-    'EventUsrEstablishPeerConnection': '0x40B5',
-    'EventUsrTwsQualificationEnablePeerOpen': '0x40B7',
-    'EventUsrBleStartBonding': '0x40D0',
-    'EventUsrBleSwitchPeripheral': '0x40D1',
-    'EventUsrBleSwitchCentral': '0x40D2',
-    'EventUsrImmAlertStop': '0x40D3',
-    'EventUsrLlsAlertStop': '0x40D4',
-    'EventUsrFindMyRemoteImmAlertMild': '0x40D5',
-    'EventUsrFindMyRemoteImmAlertHigh': '0x40D6',
-    'EventUsrFindMyPhoneImmAlertMild': '0x40D7',
-    'EventUsrFindMyPhoneImmAlertHigh': '0x40D8',
-    'EventUsrFindMyPhoneRemoteImmAlertHigh': '0x40D9',
-    'EventUsrPartyModeOn': '0x40DA',
-    'EventUsrPartyModeOff': '0x40DB',
-    'EventUsrBleDeleteDevice': '0x40EC',
-    'EventUsrAvrcpNextGroupRelease': '0x40ED',
-    'EventUsrAvrcpPreviousGroupRelease': '0x40EE',
-    'EventUsrTwsQualificationVolUp': '0x40EF',
-    'EventUsrTwsQualificationVolDown': '0x40F0',
-    'EventUsrStartA2DPStream': '0x40F1',
-    'EventUsrPbapBrowseListByName': '0x40F2',
-    'EventUsrTwsQualificationSetAbsVolume': '0x40F3',
-    'EventUsrTwsQualificationPlayTrack': '0x40F4',
-    'EventUsrBleHidExtraConfig': '0x40F5',
-    'EventUsrTwsQualificationAVRCPConfigureDataSize': '0x40F6',
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile b/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile
deleted file mode 100644
index 8509cd6..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-all: nanopb_pb2.py plugin_pb2.py
-
-%_pb2.py: %.proto
-	protoc -I. --python_out=. $<
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
deleted file mode 100644
index 0db32e9..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
+++ /dev/null
@@ -1,175 +0,0 @@
-/* Forward & backward compatibility practices must be followed.  This means:
-   a) Never re-use an enum if the semantics are different.
-   b) Never alter the semantic meaning.  If needed, simply deprecate
-      old message/field/enum & start using new one.  If deprecated message
-      is no longer used in code make sure to communicate this to QA.
-   c) Prefer 3-stage migrations if possible: add new message/enum/field & start
-      sending it, make sure QA has switched to new message/enum/field, remove old
-      message/enum/field.
-   Fields should start out required unless they are optional in the code.  They should
-   become optional when deprecated (make sure to update QA automation first to expect the
-   field to be optional) or removed.  Make sure to never re-use field ids unless
-   the field is being resurrected.
- */
-syntax = "proto2";
-//package audiowear;
-package apollo.lib.apollo_dev_util_lib.proto;
-import "nanopb.proto";
-//import "third_party/nanopb/nanopb.proto";
-
-
-option java_package = "com.google.android.bisto.nano";
-option java_outer_classname = "ApolloQA";
-
-enum ApolloQAMessageType {
-  TRACE = 1;
-  GET_VER_RESPONSE = 2;
-  GET_CODEC_RESPONSE = 3;
-  GET_DSP_STATUS_RESPONSE = 4;
-  FACTORY_PLAY_SOUND = 5;
-  FACTORY_INFO_REQUEST = 6;
-  FACTORY_INFO_RESPONSE = 7;
-}
-enum TraceId {
-  // data[0] = partition# erased.
-  OTA_ERASE_PARTITION = 1;
-  // data[0] = partition# we will write to.
-  // data[1] = expected length we will write to partition
-  OTA_START_PARTITION_WRITE = 2;
-  // data[0] = partition# written
-  // data[1] = actual written length
-  OTA_FINISHED_PARTITION_WRITE = 3;
-  // our custom signature validation has begun
-  OTA_SIGNATURE_START = 4;
-  // our custom signature validation rejected the image
-  OTA_SIGNATURE_FAILURE = 5;
-  // triggering CSR to reboot & apply DFU
-  OTA_TRIGGERING_LOADER = 6;
-  // the CSR loader rejected the image
-  OTA_LOADER_VERIFY_FAILED = 7;
-  // progress update.
-  // data[0] = num bytes received
-  // data[1] = num bytes expected
-  OTA_PROGRESS = 8;
-  OTA_ABORTED = 9;
-  // up to 10: reserved for OTA
-  // data[0] = AvrcpPlayStatus as integer
-  AVRCP_PLAY_STATUS_CHANGE = 10;
-  /* data[0] = new volume level (int16 stored in uint32)
-     data[1] = new volume level in dB (int16 stored in uint32)
-     data[2] = previous volume level (int16 stored in uint32)
-     easy conversion in python:
-       new_volume = ctypes.c_short(ctypes.c_uint(data[0]).value).value
-       new_volume_db = ctypes.c_short(ctypes.c_uint(data[1]).value).value
-  */
-  VOLUME_CHANGE = 11;
-  /* data[0] = entry number of command */
-  COMMANDER_RECV_COMMAND = 12;
-  COMMANDER_FINISH_COMMAND = 13;
-  /* currently no information about the rejected command */
-  COMMANDER_REJECT_COMMAND = 14;
-}
-/* Note: FWD_SEEK & REV_SEEK are bit-wise or'ed into the status.
-   Use SEEK_MASK first to read the seek bits, or mask it out to get the other
-   states.  Do not & with individual states for comparison, as aside from
-   seek the other states are not a bitwise mask.
-*/
-enum AvrcpPlayStatus {
-  STOPPED = 0;
-  PLAYING = 1;
-  PAUSED = 2;
-  FWD_SEEK = 8;
-  REV_SEEK = 16;
-  ERROR = 5;
-  /* & this with the status to compare against FWD_SEEK/REV_SEEK.
-     & with the complement of this value to get the other states */
-  SEEK_MASK = 24;
-}
-/* These messages are internal trace-points for QA to do whitebox validation.
-   However, developers should feel free to remove trace-points if they
-   no longer make sense (but communicate to QA to prep this ahead-of-time). */
-message ApolloQATrace {
-  // all messages should have a timestamp field and it will get auto-populated
-  // (no need to set it at the call-site).
-  required uint32 timestamp = 1;
-  // this is the event that occurred internally
-  required TraceId id = 2;
-  // this is some optional data that depends on the traceid.
-  // if not documented then no fields will be written.
-  repeated uint32 data = 3 [packed = true, (nanopb).max_count = 5];
-}
-enum PreviousBootStatus {
-  OTA_SUCCESS = 1; /* previous boot OK */
-  OTA_ERROR = 2; /* previous OTA failed */
-}
-//Next ID: 10
-message ApolloQAGetVerResponse {
-  required uint32 timestamp = 1;
-  required uint32 csr_fw_version = 2; // not implemented for now
-  required bool csr_fw_debug_build = 3; // not implemented for now
-  required uint32 vm_build_number = 4;
-  required bool vm_debug_build = 5;
-  required uint32 psoc_version = 6;
-  // the build label sent to AGSA. This should just be an amalgam of the broken-down
-  // info above. Aside from X.Y.Z prefix, do not parse this for anything as it is
-  // free-form text.
-  required string build_label = 7 [(nanopb).max_size = 32];
-  // if not set then info wasn't available.
-  optional PreviousBootStatus last_ota_status = 8;
-  required uint32 charger_version = 9;
-  optional uint32 expected_psoc_version = 10;
-}
-enum ApolloQAA2dpCodec {
-  AAC = 1;
-  SBC = 2;
-}
-message ApolloQAGetCodecResponse {
-  required uint32 timestamp = 1;
-  // if not set then unknown codec (error).
-  optional ApolloQAA2dpCodec codec = 2;
-}
-enum SinkState {
-  LIMBO = 0;
-  CONNECTABLE = 1;
-  DISCOVERABLE = 2;
-  CONNECTED = 3;
-  OUTGOING_CALLS_ESTABLISH = 4;
-  INCOMING_CALLS_ESTABLISH = 5;
-  ACTIVE_CALL_SCO = 6;
-  TEST_MODE = 7;
-  THREE_WAY_CALL_WAITING = 8;
-  THREE_WAY_CALL_ON_HOLD = 9;
-  THREE_WAY_MULTICALL = 10;
-  INCOMING_CALL_ON_HOLD = 11;
-  ACTIVE_CALL_NO_SCO = 12;
-  A2DP_STREAMING = 13;
-  DEVICE_LOW_BATTERY = 14;
-}
-message ApolloQAGetDspStatusResponse {
-  required uint32 timestamp = 1;
-  required bool is_dsp_loaded = 2;
-  required SinkState sink_state = 3;
-  /**
-   * This is a bit-wise mask.
-   * see AudioWearFeatureBits audiowear_plugin_state.h
-   * for the definition.
-   */
-  required uint32 features_active = 4;
-}
-message ApolloQAFactoryPlaySound {
-  enum PromptType {
-    PROMPT_TYPE_BT_CONNECTED = 1;
-    PROMPT_TYPE_IN_EAR = 2;
-  }
-  required PromptType prompt = 1;
-}
-message ApolloQAFactoryInfoRequest {
-}
-message ApolloQAFactoryInfoResponse {
-  required uint32 timestamp = 1;
-  optional int32 crystal_trim = 2 [(nanopb).int_size = IS_16];
-  optional bool crash_dump_exists = 3;
-  optional bool is_developer_mode_enabled = 4;
-  optional bool is_always_connected = 5;
-  optional uint32 hwid = 6;
-}
\ No newline at end of file
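As a side note, a tiny standalone sketch (illustrative values only) of the int16-in-uint32 conversion described in the VOLUME_CHANGE comment above: trace data words arrive as unsigned 32-bit integers, so signed 16-bit values such as negative dB levels must be reinterpreted.

import ctypes

def as_int16(word: int) -> int:
    # Reinterpret the low 16 bits of an unsigned trace word as a signed value.
    return ctypes.c_short(ctypes.c_uint(word).value).value

assert as_int16(0x0000FFF6) == -10   # e.g. a hypothetical -10 dB sample
assert as_int16(0x00000010) == 16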
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto
deleted file mode 100644
index 8f825bd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto
+++ /dev/null
@@ -1,25 +0,0 @@
-syntax = "proto2";
-
-//package audiowear;
-package apollo.lib.apollo_dev_util_lib.proto;
-
-option java_package = "com.google.android.bisto.nano";
-option java_outer_classname = "Protocol";
-
-enum MessageGroup {
-    UNKNOWN_MESSAGE_GROUP = 0;
-    DEVICE_INPUT = 1;
-    OTA = 2;
-    DEVICE_CAPABILITY = 3;
-    DEVICE_STATUS = 4;
-    LOGGING = 5;
-    SENSORS = 6;
-    COMPANION_STATUS = 7;
-    DEVICE_COMMAND = 8;
-    BISTO_SETTINGS = 9;
-    WELLNESS = 10;
-    TEST = 11;
-    BLE_SERVICE = 12;
-    APOLLO_QA = 126;
-    TRANSLATE = 127;
-}
\ No newline at end of file
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
deleted file mode 100644
index fefcfe4..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: apollo_qa.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-import nanopb_pb2 as nanopb__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x61pollo_qa.proto\x12$apollo.lib.apollo_dev_util_lib.proto\x1a\x0cnanopb.proto\"t\n\rApolloQATrace\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x39\n\x02id\x18\x02 \x02(\x0e\x32-.apollo.lib.apollo_dev_util_lib.proto.TraceId\x12\x15\n\x04\x64\x61ta\x18\x03 \x03(\rB\x07\x10\x01\x92?\x02\x10\x05\"\xcd\x02\n\x16\x41polloQAGetVerResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x16\n\x0e\x63sr_fw_version\x18\x02 \x02(\r\x12\x1a\n\x12\x63sr_fw_debug_build\x18\x03 \x02(\x08\x12\x17\n\x0fvm_build_number\x18\x04 \x02(\r\x12\x16\n\x0evm_debug_build\x18\x05 \x02(\x08\x12\x14\n\x0cpsoc_version\x18\x06 \x02(\r\x12\x1a\n\x0b\x62uild_label\x18\x07 \x02(\tB\x05\x92?\x02\x08 \x12Q\n\x0flast_ota_status\x18\x08 \x01(\x0e\x32\x38.apollo.lib.apollo_dev_util_lib.proto.PreviousBootStatus\x12\x17\n\x0f\x63harger_version\x18\t \x02(\r\x12\x1d\n\x15\x65xpected_psoc_version\x18\n \x01(\r\"u\n\x18\x41polloQAGetCodecResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x46\n\x05\x63odec\x18\x02 \x01(\x0e\x32\x37.apollo.lib.apollo_dev_util_lib.proto.ApolloQAA2dpCodec\"\xa6\x01\n\x1c\x41polloQAGetDspStatusResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x15\n\ris_dsp_loaded\x18\x02 \x02(\x08\x12\x43\n\nsink_state\x18\x03 \x02(\x0e\x32/.apollo.lib.apollo_dev_util_lib.proto.SinkState\x12\x17\n\x0f\x66\x65\x61tures_active\x18\x04 \x02(\r\"\xb9\x01\n\x18\x41polloQAFactoryPlaySound\x12Y\n\x06prompt\x18\x01 \x02(\x0e\x32I.apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound.PromptType\"B\n\nPromptType\x12\x1c\n\x18PROMPT_TYPE_BT_CONNECTED\x10\x01\x12\x16\n\x12PROMPT_TYPE_IN_EAR\x10\x02\"\x1c\n\x1a\x41polloQAFactoryInfoRequest\"\xb6\x01\n\x1b\x41polloQAFactoryInfoResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x1b\n\x0c\x63rystal_trim\x18\x02 \x01(\x05\x42\x05\x92?\x02\x38\x10\x12\x19\n\x11\x63rash_dump_exists\x18\x03 \x01(\x08\x12!\n\x19is_developer_mode_enabled\x18\x04 \x01(\x08\x12\x1b\n\x13is_always_connected\x18\x05 \x01(\x08\x12\x0c\n\x04hwid\x18\x06 \x01(\r*\xb8\x01\n\x13\x41polloQAMessageType\x12\t\n\x05TRACE\x10\x01\x12\x14\n\x10GET_VER_RESPONSE\x10\x02\x12\x16\n\x12GET_CODEC_RESPONSE\x10\x03\x12\x1b\n\x17GET_DSP_STATUS_RESPONSE\x10\x04\x12\x16\n\x12\x46\x41\x43TORY_PLAY_SOUND\x10\x05\x12\x18\n\x14\x46\x41\x43TORY_INFO_REQUEST\x10\x06\x12\x19\n\x15\x46\x41\x43TORY_INFO_RESPONSE\x10\x07*\xfc\x02\n\x07TraceId\x12\x17\n\x13OTA_ERASE_PARTITION\x10\x01\x12\x1d\n\x19OTA_START_PARTITION_WRITE\x10\x02\x12 
\n\x1cOTA_FINISHED_PARTITION_WRITE\x10\x03\x12\x17\n\x13OTA_SIGNATURE_START\x10\x04\x12\x19\n\x15OTA_SIGNATURE_FAILURE\x10\x05\x12\x19\n\x15OTA_TRIGGERING_LOADER\x10\x06\x12\x1c\n\x18OTA_LOADER_VERIFY_FAILED\x10\x07\x12\x10\n\x0cOTA_PROGRESS\x10\x08\x12\x0f\n\x0bOTA_ABORTED\x10\t\x12\x1c\n\x18\x41VRCP_PLAY_STATUS_CHANGE\x10\n\x12\x11\n\rVOLUME_CHANGE\x10\x0b\x12\x1a\n\x16\x43OMMANDER_RECV_COMMAND\x10\x0c\x12\x1c\n\x18\x43OMMANDER_FINISH_COMMAND\x10\r\x12\x1c\n\x18\x43OMMANDER_REJECT_COMMAND\x10\x0e*m\n\x0f\x41vrcpPlayStatus\x12\x0b\n\x07STOPPED\x10\x00\x12\x0b\n\x07PLAYING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x46WD_SEEK\x10\x08\x12\x0c\n\x08REV_SEEK\x10\x10\x12\t\n\x05\x45RROR\x10\x05\x12\r\n\tSEEK_MASK\x10\x18*4\n\x12PreviousBootStatus\x12\x0f\n\x0bOTA_SUCCESS\x10\x01\x12\r\n\tOTA_ERROR\x10\x02*%\n\x11\x41polloQAA2dpCodec\x12\x07\n\x03\x41\x41\x43\x10\x01\x12\x07\n\x03SBC\x10\x02*\xd8\x02\n\tSinkState\x12\t\n\x05LIMBO\x10\x00\x12\x0f\n\x0b\x43ONNECTABLE\x10\x01\x12\x10\n\x0c\x44ISCOVERABLE\x10\x02\x12\r\n\tCONNECTED\x10\x03\x12\x1c\n\x18OUTGOING_CALLS_ESTABLISH\x10\x04\x12\x1c\n\x18INCOMING_CALLS_ESTABLISH\x10\x05\x12\x13\n\x0f\x41\x43TIVE_CALL_SCO\x10\x06\x12\r\n\tTEST_MODE\x10\x07\x12\x1a\n\x16THREE_WAY_CALL_WAITING\x10\x08\x12\x1a\n\x16THREE_WAY_CALL_ON_HOLD\x10\t\x12\x17\n\x13THREE_WAY_MULTICALL\x10\n\x12\x19\n\x15INCOMING_CALL_ON_HOLD\x10\x0b\x12\x16\n\x12\x41\x43TIVE_CALL_NO_SCO\x10\x0c\x12\x12\n\x0e\x41\x32\x44P_STREAMING\x10\r\x12\x16\n\x12\x44\x45VICE_LOW_BATTERY\x10\x0e\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08\x41polloQA')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'apollo_qa_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\035com.google.android.bisto.nanoB\010ApolloQA'
-  _APOLLOQATRACE.fields_by_name['data']._options = None
-  _APOLLOQATRACE.fields_by_name['data']._serialized_options = b'\020\001\222?\002\020\005'
-  _APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._options = None
-  _APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._serialized_options = b'\222?\002\010 '
-  _APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._options = None
-  _APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._serialized_options = b'\222?\0028\020'
-  _APOLLOQAMESSAGETYPE._serialized_start=1217
-  _APOLLOQAMESSAGETYPE._serialized_end=1401
-  _TRACEID._serialized_start=1404
-  _TRACEID._serialized_end=1784
-  _AVRCPPLAYSTATUS._serialized_start=1786
-  _AVRCPPLAYSTATUS._serialized_end=1895
-  _PREVIOUSBOOTSTATUS._serialized_start=1897
-  _PREVIOUSBOOTSTATUS._serialized_end=1949
-  _APOLLOQAA2DPCODEC._serialized_start=1951
-  _APOLLOQAA2DPCODEC._serialized_end=1988
-  _SINKSTATE._serialized_start=1991
-  _SINKSTATE._serialized_end=2335
-  _APOLLOQATRACE._serialized_start=71
-  _APOLLOQATRACE._serialized_end=187
-  _APOLLOQAGETVERRESPONSE._serialized_start=190
-  _APOLLOQAGETVERRESPONSE._serialized_end=523
-  _APOLLOQAGETCODECRESPONSE._serialized_start=525
-  _APOLLOQAGETCODECRESPONSE._serialized_end=642
-  _APOLLOQAGETDSPSTATUSRESPONSE._serialized_start=645
-  _APOLLOQAGETDSPSTATUSRESPONSE._serialized_end=811
-  _APOLLOQAFACTORYPLAYSOUND._serialized_start=814
-  _APOLLOQAFACTORYPLAYSOUND._serialized_end=999
-  _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE._serialized_start=933
-  _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE._serialized_end=999
-  _APOLLOQAFACTORYINFOREQUEST._serialized_start=1001
-  _APOLLOQAFACTORYINFOREQUEST._serialized_end=1029
-  _APOLLOQAFACTORYINFORESPONSE._serialized_start=1032
-  _APOLLOQAFACTORYINFORESPONSE._serialized_end=1214
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
deleted file mode 100644
index 123a079..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: audiowear.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x61udiowear.proto\x12$apollo.lib.apollo_dev_util_lib.proto*\x8d\x02\n\x0cMessageGroup\x12\x19\n\x15UNKNOWN_MESSAGE_GROUP\x10\x00\x12\x10\n\x0c\x44\x45VICE_INPUT\x10\x01\x12\x07\n\x03OTA\x10\x02\x12\x15\n\x11\x44\x45VICE_CAPABILITY\x10\x03\x12\x11\n\rDEVICE_STATUS\x10\x04\x12\x0b\n\x07LOGGING\x10\x05\x12\x0b\n\x07SENSORS\x10\x06\x12\x14\n\x10\x43OMPANION_STATUS\x10\x07\x12\x12\n\x0e\x44\x45VICE_COMMAND\x10\x08\x12\x12\n\x0e\x42ISTO_SETTINGS\x10\t\x12\x0c\n\x08WELLNESS\x10\n\x12\x08\n\x04TEST\x10\x0b\x12\x0f\n\x0b\x42LE_SERVICE\x10\x0c\x12\r\n\tAPOLLO_QA\x10~\x12\r\n\tTRANSLATE\x10\x7f\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08Protocol')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'audiowear_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\035com.google.android.bisto.nanoB\010Protocol'
-  _MESSAGEGROUP._serialized_start=58
-  _MESSAGEGROUP._serialized_end=327
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
deleted file mode 100644
index dd9775c..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
+++ /dev/null
@@ -1,1482 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/descriptor.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-if _descriptor._USE_C_DESCRIPTORS == False:
-  DESCRIPTOR = _descriptor.FileDescriptor(
-    name='google/protobuf/descriptor.proto',
-    package='google.protobuf',
-    syntax='proto2',
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-    serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 
\x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01'
-  )
-else:
-  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
-
-if _descriptor._USE_C_DESCRIPTORS == False:
-  _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
-    name='Type',
-    full_name='google.protobuf.FieldDescriptorProto.Type',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_DOUBLE', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FLOAT', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_INT64', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_UINT64', index=3, number=4,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_INT32', index=4, number=5,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FIXED64', index=5, number=6,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FIXED32', index=6, number=7,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_BOOL', index=7, number=8,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_STRING', index=8, number=9,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_GROUP', index=9, number=10,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_MESSAGE', index=10, number=11,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_BYTES', index=11, number=12,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_UINT32', index=12, number=13,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_ENUM', index=13, number=14,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SFIXED32', index=14, number=15,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SFIXED64', index=15, number=16,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SINT32', index=16, number=17,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SINT64', index=17, number=18,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
-
-  _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
-    name='Label',
-    full_name='google.protobuf.FieldDescriptorProto.Label',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_OPTIONAL', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_REQUIRED', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_REPEATED', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
-
-  _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
-    name='OptimizeMode',
-    full_name='google.protobuf.FileOptions.OptimizeMode',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='SPEED', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='CODE_SIZE', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LITE_RUNTIME', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
-
-  _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
-    name='CType',
-    full_name='google.protobuf.FieldOptions.CType',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='STRING', index=0, number=0,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='CORD', index=1, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='STRING_PIECE', index=2, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
-
-
-  _FILEDESCRIPTORSET = _descriptor.Descriptor(
-    name='FileDescriptorSet',
-    full_name='google.protobuf.FileDescriptorSet',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
-        number=1, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FILEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='FileDescriptorProto',
-    full_name='google.protobuf.FileDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
-        number=3, type=9, cpp_type=9, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
-        number=10, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
-        number=11, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
-        number=4, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
-        number=5, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
-        number=6, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
-        number=7, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
-        number=8, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
-        number=9, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
-        number=12, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
-    name='ExtensionRange',
-    full_name='google.protobuf.DescriptorProto.ExtensionRange',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
-        number=1, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
-        number=2, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _DESCRIPTORPROTO = _descriptor.Descriptor(
-    name='DescriptorProto',
-    full_name='google.protobuf.DescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
-        number=6, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
-        number=3, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
-        number=4, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
-        number=5, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
-        number=8, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
-        number=7, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='FieldDescriptorProto',
-    full_name='google.protobuf.FieldDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
-        number=3, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
-        number=4, type=14, cpp_type=8, label=1,
-        has_default_value=False, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
-        number=5, type=14, cpp_type=8, label=1,
-        has_default_value=False, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
-        number=6, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
-        number=7, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
-        number=9, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=8,
-        number=8, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FIELDDESCRIPTORPROTO_TYPE,
-      _FIELDDESCRIPTORPROTO_LABEL,
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='OneofDescriptorProto',
-    full_name='google.protobuf.OneofDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='EnumDescriptorProto',
-    full_name='google.protobuf.EnumDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='EnumValueDescriptorProto',
-    full_name='google.protobuf.EnumValueDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
-        number=2, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='ServiceDescriptorProto',
-    full_name='google.protobuf.ServiceDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _METHODDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='MethodDescriptorProto',
-    full_name='google.protobuf.MethodDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
-        number=4, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
-        number=5, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
-        number=6, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FILEOPTIONS = _descriptor.Descriptor(
-    name='FileOptions',
-    full_name='google.protobuf.FileOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
-        number=8, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
-        number=10, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
-        number=20, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
-        number=27, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
-        number=9, type=14, cpp_type=8, label=1,
-        has_default_value=True, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
-        number=11, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
-        number=16, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
-        number=17, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
-        number=18, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10,
-        number=23, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=11,
-        number=31, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=12,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FILEOPTIONS_OPTIMIZEMODE,
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _MESSAGEOPTIONS = _descriptor.Descriptor(
-    name='MessageOptions',
-    full_name='google.protobuf.MessageOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
-        number=1, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
-        number=7, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _FIELDOPTIONS = _descriptor.Descriptor(
-    name='FieldOptions',
-    full_name='google.protobuf.FieldOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
-        number=1, type=14, cpp_type=8, label=1,
-        has_default_value=True, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=2,
-        number=5, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=3,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='weak', full_name='google.protobuf.FieldOptions.weak', index=4,
-        number=10, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=5,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FIELDOPTIONS_CTYPE,
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMOPTIONS = _descriptor.Descriptor(
-    name='EnumOptions',
-    full_name='google.protobuf.EnumOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMVALUEOPTIONS = _descriptor.Descriptor(
-    name='EnumValueOptions',
-    full_name='google.protobuf.EnumValueOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
-        number=1, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _SERVICEOPTIONS = _descriptor.Descriptor(
-    name='ServiceOptions',
-    full_name='google.protobuf.ServiceOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
-        number=33, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _METHODOPTIONS = _descriptor.Descriptor(
-    name='MethodOptions',
-    full_name='google.protobuf.MethodOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
-        number=33, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
-    name='NamePart',
-    full_name='google.protobuf.UninterpretedOption.NamePart',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
-        number=1, type=9, cpp_type=9, label=2,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
-        number=2, type=8, cpp_type=7, label=2,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _UNINTERPRETEDOPTION = _descriptor.Descriptor(
-    name='UninterpretedOption',
-    full_name='google.protobuf.UninterpretedOption',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
-        number=4, type=4, cpp_type=4, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
-        number=5, type=3, cpp_type=2, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
-        number=6, type=1, cpp_type=5, label=1,
-        has_default_value=False, default_value=float(0),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
-        number=7, type=12, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"",
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
-        number=8, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
-    name='Location',
-    full_name='google.protobuf.SourceCodeInfo.Location',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
-        number=1, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
-        number=2, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
-        number=4, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _SOURCECODEINFO = _descriptor.Descriptor(
-    name='SourceCodeInfo',
-    full_name='google.protobuf.SourceCodeInfo',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
-        number=1, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_SOURCECODEINFO_LOCATION, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
-  _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
-  _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
-  _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
-  _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
-  _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
-  _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
-  _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
-  _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
-  _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
-  _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
-  _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
-  _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
-  _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
-  _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
-  _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
-  _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
-  _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
-  _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
-  _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
-  _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
-  _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
-  _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
-  DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
-  DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
-  DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
-  DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
-  DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
-  DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
-  DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
-  DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
-  DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
-  DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
-  _sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-else:
-  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  _FILEDESCRIPTORSET._serialized_start=53
-  _FILEDESCRIPTORSET._serialized_end=124
-  _FILEDESCRIPTORPROTO._serialized_start=127
-  _FILEDESCRIPTORPROTO._serialized_end=602
-  _DESCRIPTORPROTO._serialized_start=605
-  _DESCRIPTORPROTO._serialized_end=1089
-  _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1045
-  _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1089
-  _FIELDDESCRIPTORPROTO._serialized_start=1092
-  _FIELDDESCRIPTORPROTO._serialized_end=1773
-  _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1394
-  _FIELDDESCRIPTORPROTO_TYPE._serialized_end=1704
-  _FIELDDESCRIPTORPROTO_LABEL._serialized_start=1706
-  _FIELDDESCRIPTORPROTO_LABEL._serialized_end=1773
-  _ONEOFDESCRIPTORPROTO._serialized_start=1775
-  _ONEOFDESCRIPTORPROTO._serialized_end=1811
-  _ENUMDESCRIPTORPROTO._serialized_start=1814
-  _ENUMDESCRIPTORPROTO._serialized_end=1954
-  _ENUMVALUEDESCRIPTORPROTO._serialized_start=1956
-  _ENUMVALUEDESCRIPTORPROTO._serialized_end=2064
-  _SERVICEDESCRIPTORPROTO._serialized_start=2067
-  _SERVICEDESCRIPTORPROTO._serialized_end=2211
-  _METHODDESCRIPTORPROTO._serialized_start=2214
-  _METHODDESCRIPTORPROTO._serialized_end=2407
-  _FILEOPTIONS._serialized_start=2410
-  _FILEOPTIONS._serialized_end=2998
-  _FILEOPTIONS_OPTIMIZEMODE._serialized_start=2929
-  _FILEOPTIONS_OPTIMIZEMODE._serialized_end=2987
-  _MESSAGEOPTIONS._serialized_start=3001
-  _MESSAGEOPTIONS._serialized_end=3231
-  _FIELDOPTIONS._serialized_start=3234
-  _FIELDOPTIONS._serialized_end=3522
-  _FIELDOPTIONS_CTYPE._serialized_start=3464
-  _FIELDOPTIONS_CTYPE._serialized_end=3511
-  _ENUMOPTIONS._serialized_start=3525
-  _ENUMOPTIONS._serialized_end=3666
-  _ENUMVALUEOPTIONS._serialized_start=3668
-  _ENUMVALUEOPTIONS._serialized_end=3793
-  _SERVICEOPTIONS._serialized_start=3795
-  _SERVICEOPTIONS._serialized_end=3918
-  _METHODOPTIONS._serialized_start=3920
-  _METHODOPTIONS._serialized_end=4042
-  _UNINTERPRETEDOPTION._serialized_start=4045
-  _UNINTERPRETEDOPTION._serialized_end=4331
-  _UNINTERPRETEDOPTION_NAMEPART._serialized_start=4280
-  _UNINTERPRETEDOPTION_NAMEPART._serialized_end=4331
-  _SOURCECODEINFO._serialized_start=4334
-  _SOURCECODEINFO._serialized_end=4511
-  _SOURCECODEINFO_LOCATION._serialized_start=4412
-  _SOURCECODEINFO_LOCATION._serialized_end=4511
-# @@protoc_insertion_point(module_scope)
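
Note: the file deleted above is a vendored copy of the generated binding for google/protobuf/descriptor.proto. The same module ships with the protobuf package on PyPI, so callers can import it from there instead; a minimal sketch of equivalent usage (assuming protobuf remains an installed dependency of antlion) is:

    from google.protobuf import descriptor_pb2

    # Build a FileDescriptorProto by hand; the class comes from the installed
    # protobuf wheel, which makes the vendored generated module redundant.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = "example.proto"
    file_proto.package = "example"
    message = file_proto.message_type.add()  # nested DescriptorProto
    message.name = "Ping"
    print(file_proto)
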
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
deleted file mode 100644
index c23077a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: nanopb.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cnanopb.proto\x1a google/protobuf/descriptor.proto\"\x80\x02\n\rNanoPBOptions\x12\x10\n\x08max_size\x18\x01 \x01(\x05\x12\x11\n\tmax_count\x18\x02 \x01(\x05\x12&\n\x08int_size\x18\x07 \x01(\x0e\x32\x08.IntSize:\nIS_DEFAULT\x12$\n\x04type\x18\x03 \x01(\x0e\x32\n.FieldType:\nFT_DEFAULT\x12\x18\n\nlong_names\x18\x04 \x01(\x08:\x04true\x12\x1c\n\rpacked_struct\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cskip_message\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tno_unions\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05msgid\x18\t \x01(\r*Z\n\tFieldType\x12\x0e\n\nFT_DEFAULT\x10\x00\x12\x0f\n\x0b\x46T_CALLBACK\x10\x01\x12\x0e\n\nFT_POINTER\x10\x04\x12\r\n\tFT_STATIC\x10\x02\x12\r\n\tFT_IGNORE\x10\x03*D\n\x07IntSize\x12\x0e\n\nIS_DEFAULT\x10\x00\x12\x08\n\x04IS_8\x10\x08\x12\t\n\x05IS_16\x10\x10\x12\t\n\x05IS_32\x10 \x12\t\n\x05IS_64\x10@:E\n\x0enanopb_fileopt\x12\x1c.google.protobuf.FileOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:G\n\rnanopb_msgopt\x12\x1f.google.protobuf.MessageOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:E\n\x0enanopb_enumopt\x12\x1c.google.protobuf.EnumOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:>\n\x06nanopb\x12\x1d.google.protobuf.FieldOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptionsB\x1a\n\x18\x66i.kapsi.koti.jpa.nanopb')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'nanopb_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-  google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(nanopb_fileopt)
-  google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(nanopb_msgopt)
-  google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(nanopb_enumopt)
-  google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nanopb)
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\030fi.kapsi.koti.jpa.nanopb'
-  _FIELDTYPE._serialized_start=309
-  _FIELDTYPE._serialized_end=399
-  _INTSIZE._serialized_start=401
-  _INTSIZE._serialized_end=469
-  _NANOPBOPTIONS._serialized_start=51
-  _NANOPBOPTIONS._serialized_end=307
-# @@protoc_insertion_point(module_scope)
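
Note: nanopb_pb2 defined only the NanoPBOptions message plus the nanopb_* extensions on the standard descriptor options. A hedged sketch of how a consumer would have read those extensions (names taken from the deleted module, shown for illustration only):

    from google.protobuf import descriptor_pb2

    import nanopb_pb2  # the generated module deleted above

    # Importing nanopb_pb2 registers its extensions; they can then be set and
    # read through the Extensions map on FieldOptions.
    field_options = descriptor_pb2.FieldOptions()
    field_options.Extensions[nanopb_pb2.nanopb].max_size = 40
    print(field_options.Extensions[nanopb_pb2.nanopb].max_size)
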
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
deleted file mode 100644
index 79fffcd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: plugin.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cplugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB,\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtos')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'plugin_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtos'
-  _CODEGENERATORREQUEST._serialized_start=76
-  _CODEGENERATORREQUEST._serialized_end=201
-  _CODEGENERATORRESPONSE._serialized_start=204
-  _CODEGENERATORRESPONSE._serialized_end=374
-  _CODEGENERATORRESPONSE_FILE._serialized_start=312
-  _CODEGENERATORRESPONSE_FILE._serialized_end=374
-# @@protoc_insertion_point(module_scope)
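
Note: plugin_pb2 is the generated binding for the protoc plugin protocol (read a CodeGeneratorRequest from stdin, write a CodeGeneratorResponse to stdout). The upstream protobuf package exposes the same module as google.protobuf.compiler.plugin_pb2; a minimal plugin sketch built on it (hypothetical output names, for illustration only):

    import sys

    from google.protobuf.compiler import plugin_pb2

    def main() -> None:
        # protoc writes a serialized CodeGeneratorRequest to the plugin's stdin.
        request = plugin_pb2.CodeGeneratorRequest.FromString(sys.stdin.buffer.read())

        response = plugin_pb2.CodeGeneratorResponse()
        for name in request.file_to_generate:
            generated = response.file.add()
            generated.name = f"{name}.echo.txt"
            generated.content = f"generated from {name}\n"

        # The plugin writes a serialized CodeGeneratorResponse back to stdout.
        sys.stdout.buffer.write(response.SerializeToString())

    if __name__ == "__main__":
        main()
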
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
deleted file mode 100644
index e17c0cc..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
+++ /dev/null
@@ -1,714 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc.  All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//     * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//  Based on original Protocol Buffers design by
-//  Sanjay Ghemawat, Jeff Dean, and others.
-//
-// The messages in this file describe the definitions found in .proto files.
-// A valid .proto file can be translated directly to a FileDescriptorProto
-// without any other information (e.g. without reading its imports).
-
-
-syntax = "proto2";
-
-package google.protobuf;
-option java_package = "com.google.protobuf";
-option java_outer_classname = "DescriptorProtos";
-
-// descriptor.proto must be optimized for speed because reflection-based
-// algorithms don't work during bootstrapping.
-option optimize_for = SPEED;
-
-// The protocol compiler can output a FileDescriptorSet containing the .proto
-// files it parses.
-message FileDescriptorSet {
-  repeated FileDescriptorProto file = 1;
-}
-
-// Describes a complete .proto file.
-message FileDescriptorProto {
-  optional string name = 1;       // file name, relative to root of source tree
-  optional string package = 2;    // e.g. "foo", "foo.bar", etc.
-
-  // Names of files imported by this file.
-  repeated string dependency = 3;
-  // Indexes of the public imported files in the dependency list above.
-  repeated int32 public_dependency = 10;
-  // Indexes of the weak imported files in the dependency list.
-  // For Google-internal migration only. Do not use.
-  repeated int32 weak_dependency = 11;
-
-  // All top-level definitions in this file.
-  repeated DescriptorProto message_type = 4;
-  repeated EnumDescriptorProto enum_type = 5;
-  repeated ServiceDescriptorProto service = 6;
-  repeated FieldDescriptorProto extension = 7;
-
-  optional FileOptions options = 8;
-
-  // This field contains optional information about the original source code.
-  // You may safely remove this entire field without harming runtime
-  // functionality of the descriptors -- the information is needed only by
-  // development tools.
-  optional SourceCodeInfo source_code_info = 9;
-
-  // The syntax of the proto file.
-  // The supported values are "proto2" and "proto3".
-  optional string syntax = 12;
-}
-
-// Describes a message type.
-message DescriptorProto {
-  optional string name = 1;
-
-  repeated FieldDescriptorProto field = 2;
-  repeated FieldDescriptorProto extension = 6;
-
-  repeated DescriptorProto nested_type = 3;
-  repeated EnumDescriptorProto enum_type = 4;
-
-  message ExtensionRange {
-    optional int32 start = 1;
-    optional int32 end = 2;
-  }
-  repeated ExtensionRange extension_range = 5;
-
-  repeated OneofDescriptorProto oneof_decl = 8;
-
-  optional MessageOptions options = 7;
-}
-
-// Describes a field within a message.
-message FieldDescriptorProto {
-  enum Type {
-    // 0 is reserved for errors.
-    // Order is weird for historical reasons.
-    TYPE_DOUBLE         = 1;
-    TYPE_FLOAT          = 2;
-    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-    // negative values are likely.
-    TYPE_INT64          = 3;
-    TYPE_UINT64         = 4;
-    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-    // negative values are likely.
-    TYPE_INT32          = 5;
-    TYPE_FIXED64        = 6;
-    TYPE_FIXED32        = 7;
-    TYPE_BOOL           = 8;
-    TYPE_STRING         = 9;
-    TYPE_GROUP          = 10;  // Tag-delimited aggregate.
-    TYPE_MESSAGE        = 11;  // Length-delimited aggregate.
-
-    // New in version 2.
-    TYPE_BYTES          = 12;
-    TYPE_UINT32         = 13;
-    TYPE_ENUM           = 14;
-    TYPE_SFIXED32       = 15;
-    TYPE_SFIXED64       = 16;
-    TYPE_SINT32         = 17;  // Uses ZigZag encoding.
-    TYPE_SINT64         = 18;  // Uses ZigZag encoding.
-  };
-
-  enum Label {
-    // 0 is reserved for errors
-    LABEL_OPTIONAL      = 1;
-    LABEL_REQUIRED      = 2;
-    LABEL_REPEATED      = 3;
-    // TODO(sanjay): Should we add LABEL_MAP?
-  };
-
-  optional string name = 1;
-  optional int32 number = 3;
-  optional Label label = 4;
-
-  // If type_name is set, this need not be set.  If both this and type_name
-  // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
-  optional Type type = 5;
-
-  // For message and enum types, this is the name of the type.  If the name
-  // starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
-  // rules are used to find the type (i.e. first the nested types within this
-  // message are searched, then within the parent, on up to the root
-  // namespace).
-  optional string type_name = 6;
-
-  // For extensions, this is the name of the type being extended.  It is
-  // resolved in the same manner as type_name.
-  optional string extendee = 2;
-
-  // For numeric types, contains the original text representation of the value.
-  // For booleans, "true" or "false".
-  // For strings, contains the default text contents (not escaped in any way).
-  // For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
-  // TODO(kenton):  Base-64 encode?
-  optional string default_value = 7;
-
-  // If set, gives the index of a oneof in the containing type's oneof_decl
-  // list.  This field is a member of that oneof.  Extensions of a oneof should
-  // not set this since the oneof to which they belong will be inferred based
-  // on the extension range containing the extension's field number.
-  optional int32 oneof_index = 9;
-
-  optional FieldOptions options = 8;
-}
-
-// Describes a oneof.
-message OneofDescriptorProto {
-  optional string name = 1;
-}
-
-// Describes an enum type.
-message EnumDescriptorProto {
-  optional string name = 1;
-
-  repeated EnumValueDescriptorProto value = 2;
-
-  optional EnumOptions options = 3;
-}
-
-// Describes a value within an enum.
-message EnumValueDescriptorProto {
-  optional string name = 1;
-  optional int32 number = 2;
-
-  optional EnumValueOptions options = 3;
-}
-
-// Describes a service.
-message ServiceDescriptorProto {
-  optional string name = 1;
-  repeated MethodDescriptorProto method = 2;
-
-  optional ServiceOptions options = 3;
-}
-
-// Describes a method of a service.
-message MethodDescriptorProto {
-  optional string name = 1;
-
-  // Input and output type names.  These are resolved in the same way as
-  // FieldDescriptorProto.type_name, but must refer to a message type.
-  optional string input_type = 2;
-  optional string output_type = 3;
-
-  optional MethodOptions options = 4;
-
-  // Identifies if client streams multiple client messages
-  optional bool client_streaming = 5 [default=false];
-  // Identifies if server streams multiple server messages
-  optional bool server_streaming = 6 [default=false];
-}
-
-
-// ===================================================================
-// Options
-
-// Each of the definitions above may have "options" attached.  These are
-// just annotations which may cause code to be generated slightly differently
-// or may contain hints for code that manipulates protocol messages.
-//
-// Clients may define custom options as extensions of the *Options messages.
-// These extensions may not yet be known at parsing time, so the parser cannot
-// store the values in them.  Instead it stores them in a field in the *Options
-// message called uninterpreted_option. This field must have the same name
-// across all *Options messages. We then use this field to populate the
-// extensions when we build a descriptor, at which point all protos have been
-// parsed and so all extensions are known.
-//
-// Extension numbers for custom options may be chosen as follows:
-// * For options which will only be used within a single application or
-//   organization, or for experimental options, use field numbers 50000
-//   through 99999.  It is up to you to ensure that you do not use the
-//   same number for multiple options.
-// * For options which will be published and used publicly by multiple
-//   independent entities, e-mail protobuf-global-extension-registry@google.com
-//   to reserve extension numbers. Simply provide your project name (e.g.
-//   Objective-C plugin) and your project website (if available) -- there's no need
-//   to explain how you intend to use them. Usually you only need one extension
-//   number. You can declare multiple options with only one extension number by
-//   putting them in a sub-message. See the Custom Options section of the docs
-//   for examples:
-//   https://developers.google.com/protocol-buffers/docs/proto#options
-//   If this turns out to be popular, a web service will be set up
-//   to automatically assign option numbers.
-
-
-message FileOptions {
-
-  // Sets the Java package where classes generated from this .proto will be
-  // placed.  By default, the proto package is used, but this is often
-  // inappropriate because proto packages do not normally start with backwards
-  // domain names.
-  optional string java_package = 1;
-
-
-  // If set, all the classes from the .proto file are wrapped in a single
-  // outer class with the given name.  This applies to both Proto1
-  // (equivalent to the old "--one_java_file" option) and Proto2 (where
-  // a .proto always translates to a single class, but you may want to
-  // explicitly choose the class name).
-  optional string java_outer_classname = 8;
-
-  // If set true, then the Java code generator will generate a separate .java
-  // file for each top-level message, enum, and service defined in the .proto
-  // file.  Thus, these types will *not* be nested inside the outer class
-  // named by java_outer_classname.  However, the outer class will still be
-  // generated to contain the file's getDescriptor() method as well as any
-  // top-level extensions defined in the file.
-  optional bool java_multiple_files = 10 [default=false];
-
-  // If set true, then the Java code generator will generate equals() and
-  // hashCode() methods for all messages defined in the .proto file.
-  // - In the full runtime, this is purely a speed optimization, as the
-  // AbstractMessage base class includes reflection-based implementations of
-  // these methods.
-  // - In the lite runtime, setting this option changes the semantics of
-  // equals() and hashCode() to more closely match those of the full runtime;
-  // the generated methods compute their results based on field values rather
-  // than object identity. (Implementations should not assume that hashcodes
-  // will be consistent across runtimes or versions of the protocol compiler.)
-  optional bool java_generate_equals_and_hash = 20 [default=false];
-
-  // If set true, then the Java2 code generator will generate code that
-  // throws an exception whenever an attempt is made to assign a non-UTF-8
-  // byte sequence to a string field.
-  // Message reflection will do the same.
-  // However, an extension field still accepts non-UTF-8 byte sequences.
-  // This option has no effect when used with the lite runtime.
-  optional bool java_string_check_utf8 = 27 [default=false];
-
-
-  // Generated classes can be optimized for speed or code size.
-  enum OptimizeMode {
-    SPEED = 1;        // Generate complete code for parsing, serialization,
-                      // etc.
-    CODE_SIZE = 2;    // Use ReflectionOps to implement these methods.
-    LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
-  }
-  optional OptimizeMode optimize_for = 9 [default=SPEED];
-
-  // Sets the Go package where structs generated from this .proto will be
-  // placed. If omitted, the Go package will be derived from the following:
-  //   - The basename of the package import path, if provided.
-  //   - Otherwise, the package statement in the .proto file, if present.
-  //   - Otherwise, the basename of the .proto file, without extension.
-  optional string go_package = 11;
-
-
-
-  // Should generic services be generated in each language?  "Generic" services
-  // are not specific to any particular RPC system.  They are generated by the
-  // main code generators in each language (without additional plugins).
-  // Generic services were the only kind of service generation supported by
-  // early versions of google.protobuf.
-  //
-  // Generic services are now considered deprecated in favor of using plugins
-  // that generate code specific to your particular RPC system.  Therefore,
-  // these default to false.  Old code which depends on generic services should
-  // explicitly set them to true.
-  optional bool cc_generic_services = 16 [default=false];
-  optional bool java_generic_services = 17 [default=false];
-  optional bool py_generic_services = 18 [default=false];
-
-  // Is this file deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for everything in the file, or it will be completely ignored; in the very
-  // least, this is a formalization for deprecating files.
-  optional bool deprecated = 23 [default=false];
-
-
-  // Enables the use of arenas for the proto messages in this file. This applies
-  // only to generated classes for C++.
-  optional bool cc_enable_arenas = 31 [default=false];
-
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message MessageOptions {
-  // Set true to use the old proto1 MessageSet wire format for extensions.
-  // This is provided for backwards-compatibility with the MessageSet wire
-  // format.  You should not use this for any other reason:  It's less
-  // efficient, has fewer features, and is more complicated.
-  //
-  // The message must be defined exactly as follows:
-  //   message Foo {
-  //     option message_set_wire_format = true;
-  //     extensions 4 to max;
-  //   }
-  // Note that the message cannot have any defined fields; MessageSets only
-  // have extensions.
-  //
-  // All extensions of your type must be singular messages; e.g. they cannot
-  // be int32s, enums, or repeated messages.
-  //
-  // Because this is an option, the above two restrictions are not enforced by
-  // the protocol compiler.
-  optional bool message_set_wire_format = 1 [default=false];
-
-  // Disables the generation of the standard "descriptor()" accessor, which can
-  // conflict with a field of the same name.  This is meant to make migration
-  // from proto1 easier; new code should avoid fields named "descriptor".
-  optional bool no_standard_descriptor_accessor = 2 [default=false];
-
-  // Is this message deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the message, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating messages.
-  optional bool deprecated = 3 [default=false];
-
-  // Whether the message is an automatically generated map entry type for the
-  // maps field.
-  //
-  // For maps fields:
-  //     map<KeyType, ValueType> map_field = 1;
-  // The parsed descriptor looks like:
-  //     message MapFieldEntry {
-  //         option map_entry = true;
-  //         optional KeyType key = 1;
-  //         optional ValueType value = 2;
-  //     }
-  //     repeated MapFieldEntry map_field = 1;
-  //
-  // Implementations may choose not to generate the map_entry=true message, but
-  // use a native map in the target language to hold the keys and values.
-  // The reflection APIs in such implementations still need to work as
-  // if the field is a repeated message field.
-  //
-  // NOTE: Do not set the option in .proto files. Always use the maps syntax
-  // instead. The option should only be implicitly set by the proto compiler
-  // parser.
-  optional bool map_entry = 7;
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message FieldOptions {
-  // The ctype option instructs the C++ code generator to use a different
-  // representation of the field than it normally would.  See the specific
-  // options below.  This option is not yet implemented in the open source
-  // release -- sorry, we'll try to include it in a future version!
-  optional CType ctype = 1 [default = STRING];
-  enum CType {
-    // Default mode.
-    STRING = 0;
-
-    CORD = 1;
-
-    STRING_PIECE = 2;
-  }
-  // The packed option can be enabled for repeated primitive fields to enable
-  // a more efficient representation on the wire. Rather than repeatedly
-  // writing the tag and type for each element, the entire array is encoded as
-  // a single length-delimited blob.
-  optional bool packed = 2;
-
-
-
-  // Should this field be parsed lazily?  Lazy applies only to message-type
-  // fields.  It means that when the outer message is initially parsed, the
-  // inner message's contents will not be parsed but instead stored in encoded
-  // form.  The inner message will actually be parsed when it is first accessed.
-  //
-  // This is only a hint.  Implementations are free to choose whether to use
-  // eager or lazy parsing regardless of the value of this option.  However,
-  // setting this option true suggests that the protocol author believes that
-  // using lazy parsing on this field is worth the additional bookkeeping
-  // overhead typically needed to implement it.
-  //
-  // This option does not affect the public interface of any generated code;
-  // all method signatures remain the same.  Furthermore, thread-safety of the
-  // interface is not affected by this option; const methods remain safe to
-  // call from multiple threads concurrently, while non-const methods continue
-  // to require exclusive access.
-  //
-  //
-  // Note that implementations may choose not to check required fields within
-  // a lazy sub-message.  That is, calling IsInitialized() on the outer message
-  // may return true even if the inner message has missing required fields.
-  // This is necessary because otherwise the inner message would have to be
-  // parsed in order to perform the check, defeating the purpose of lazy
-  // parsing.  An implementation which chooses not to check required fields
-  // must be consistent about it.  That is, for any particular sub-message, the
-  // implementation must either *always* check its required fields, or *never*
-  // check its required fields, regardless of whether or not the message has
-  // been parsed.
-  optional bool lazy = 5 [default=false];
-
-  // Is this field deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for accessors, or it will be completely ignored; in the very least, this
-  // is a formalization for deprecating fields.
-  optional bool deprecated = 3 [default=false];
-
-  // For Google-internal migration only. Do not use.
-  optional bool weak = 10 [default=false];
-
-
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message EnumOptions {
-
-  // Set this option to true to allow mapping different tag names to the same
-  // value.
-  optional bool allow_alias = 2;
-
-  // Is this enum deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the enum, or it will be completely ignored; in the very least, this
-  // is a formalization for deprecating enums.
-  optional bool deprecated = 3 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message EnumValueOptions {
-  // Is this enum value deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the enum value, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating enum values.
-  optional bool deprecated = 1 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message ServiceOptions {
-
-  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
-  //   framework.  We apologize for hoarding these numbers to ourselves, but
-  //   we were already using them long before we decided to release Protocol
-  //   Buffers.
-
-  // Is this service deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the service, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating services.
-  optional bool deprecated = 33 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message MethodOptions {
-
-  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
-  //   framework.  We apologize for hoarding these numbers to ourselves, but
-  //   we were already using them long before we decided to release Protocol
-  //   Buffers.
-
-  // Is this method deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the method, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating methods.
-  optional bool deprecated = 33 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-
-// A message representing an option the parser does not recognize. This only
-// appears in options protos created by the compiler::Parser class.
-// DescriptorPool resolves these when building Descriptor objects. Therefore,
-// options protos in descriptor objects (e.g. returned by Descriptor::options(),
-// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
-// in them.
-message UninterpretedOption {
-  // The name of the uninterpreted option.  Each string represents a segment in
-  // a dot-separated name.  is_extension is true iff a segment represents an
-  // extension (denoted with parentheses in options specs in .proto files).
-  // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
-  // "foo.(bar.baz).qux".
-  message NamePart {
-    required string name_part = 1;
-    required bool is_extension = 2;
-  }
-  repeated NamePart name = 2;
-
-  // The value of the uninterpreted option, in whatever type the tokenizer
-  // identified it as during parsing. Exactly one of these should be set.
-  optional string identifier_value = 3;
-  optional uint64 positive_int_value = 4;
-  optional int64 negative_int_value = 5;
-  optional double double_value = 6;
-  optional bytes string_value = 7;
-  optional string aggregate_value = 8;
-}
-
-// ===================================================================
-// Optional source code info
-
-// Encapsulates information about the original source file from which a
-// FileDescriptorProto was generated.
-message SourceCodeInfo {
-  // A Location identifies a piece of source code in a .proto file which
-  // corresponds to a particular definition.  This information is intended
-  // to be useful to IDEs, code indexers, documentation generators, and similar
-  // tools.
-  //
-  // For example, say we have a file like:
-  //   message Foo {
-  //     optional string foo = 1;
-  //   }
-  // Let's look at just the field definition:
-  //   optional string foo = 1;
-  //   ^       ^^     ^^  ^  ^^^
-  //   a       bc     de  f  ghi
-  // We have the following locations:
-  //   span   path               represents
-  //   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
-  //   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
-  //   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
-  //   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
-  //   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
-  //
-  // Notes:
-  // - A location may refer to a repeated field itself (i.e. not to any
-  //   particular index within it).  This is used whenever a set of elements are
-  //   logically enclosed in a single code segment.  For example, an entire
-  //   extend block (possibly containing multiple extension definitions) will
-  //   have an outer location whose path refers to the "extensions" repeated
-  //   field without an index.
-  // - Multiple locations may have the same path.  This happens when a single
-  //   logical declaration is spread out across multiple places.  The most
-  //   obvious example is the "extend" block again -- there may be multiple
-  //   extend blocks in the same scope, each of which will have the same path.
-  // - A location's span is not always a subset of its parent's span.  For
-  //   example, the "extendee" of an extension declaration appears at the
-  //   beginning of the "extend" block and is shared by all extensions within
-  //   the block.
-  // - Just because a location's span is a subset of some other location's span
-  //   does not mean that it is a descendant.  For example, a "group" defines
-  //   both a type and a field in a single declaration.  Thus, the locations
-  //   corresponding to the type and field and their components will overlap.
-  // - Code which tries to interpret locations should probably be designed to
-  //   ignore those that it doesn't understand, as more types of locations could
-  //   be recorded in the future.
-  repeated Location location = 1;
-  message Location {
-    // Identifies which part of the FileDescriptorProto was defined at this
-    // location.
-    //
-    // Each element is a field number or an index.  They form a path from
-    // the root FileDescriptorProto to the place where the definition appears.  For
-    // example, this path:
-    //   [ 4, 3, 2, 7, 1 ]
-    // refers to:
-    //   file.message_type(3)  // 4, 3
-    //       .field(7)         // 2, 7
-    //       .name()           // 1
-    // This is because FileDescriptorProto.message_type has field number 4:
-    //   repeated DescriptorProto message_type = 4;
-    // and DescriptorProto.field has field number 2:
-    //   repeated FieldDescriptorProto field = 2;
-    // and FieldDescriptorProto.name has field number 1:
-    //   optional string name = 1;
-    //
-    // Thus, the above path gives the location of a field name.  If we removed
-    // the last element:
-    //   [ 4, 3, 2, 7 ]
-    // this path refers to the whole field declaration (from the beginning
-    // of the label to the terminating semicolon).
-    repeated int32 path = 1 [packed=true];
-
-    // Always has exactly three or four elements: start line, start column,
-    // end line (optional, otherwise assumed same as start line), end column.
-    // These are packed into a single field for efficiency.  Note that line
-    // and column numbers are zero-based -- typically you will want to add
-    // 1 to each before displaying to a user.
-    repeated int32 span = 2 [packed=true];
-
-    // If this SourceCodeInfo represents a complete declaration, these are any
-    // comments appearing before and after the declaration which appear to be
-    // attached to the declaration.
-    //
-    // A series of line comments appearing on consecutive lines, with no other
-    // tokens appearing on those lines, will be treated as a single comment.
-    //
-    // Only the comment content is provided; comment markers (e.g. //) are
-    // stripped out.  For block comments, leading whitespace and an asterisk
-    // will be stripped from the beginning of each line other than the first.
-    // Newlines are included in the output.
-    //
-    // Examples:
-    //
-    //   optional int32 foo = 1;  // Comment attached to foo.
-    //   // Comment attached to bar.
-    //   optional int32 bar = 2;
-    //
-    //   optional string baz = 3;
-    //   // Comment attached to baz.
-    //   // Another line attached to baz.
-    //
-    //   // Comment attached to qux.
-    //   //
-    //   // Another line attached to qux.
-    //   optional double qux = 4;
-    //
-    //   optional string corge = 5;
-    //   /* Block comment attached
-    //    * to corge.  Leading asterisks
-    //    * will be removed. */
-    //   /* Block comment attached to
-    //    * grault. */
-    //   optional int32 grault = 6;
-    optional string leading_comments = 3;
-    optional string trailing_comments = 4;
-  }
-}
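
Note: the SourceCodeInfo.Location comments above describe the path/span encoding in prose; a short Python sketch of the same example (using descriptor_pb2 from the installed protobuf package, with made-up span values) may make it concrete:

    from google.protobuf import descriptor_pb2

    # Path [4, 3, 2, 7, 1] = file.message_type(3).field(7).name(), as described
    # above; span holds zero-based start line, start column, end line, end
    # column (example values only).
    location = descriptor_pb2.SourceCodeInfo.Location()
    location.path.extend([4, 3, 2, 7, 1])
    location.span.extend([10, 2, 10, 35])

    info = descriptor_pb2.SourceCodeInfo()
    info.location.append(location)
    print(info)
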
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto
deleted file mode 100644
index 5053dfd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto
+++ /dev/null
@@ -1,89 +0,0 @@
-// Custom options for defining:
-// - Maximum size of string/bytes
-// - Maximum number of elements in array
-//
-// These are used by nanopb to generate statically allocable structures
-// for memory-limited environments.
-
-syntax = "proto2";
-import "google/protobuf/descriptor.proto";
-
-option java_package = "fi.kapsi.koti.jpa.nanopb";
-
-enum FieldType {
-    FT_DEFAULT = 0; // Automatically decide field type, generate static field if possible.
-    FT_CALLBACK = 1; // Always generate a callback field.
-    FT_POINTER = 4; // Always generate a dynamically allocated field.
-    FT_STATIC = 2; // Generate a static field or raise an exception if not possible.
-    FT_IGNORE = 3; // Ignore the field completely.
-}
-
-enum IntSize {
-    IS_DEFAULT = 0; // Default, 32/64bit based on type in .proto
-    IS_8 = 8;
-    IS_16 = 16;
-    IS_32 = 32;
-    IS_64 = 64;
-}
-
-// This is the inner options message, which basically defines options for
-// a field. When it is used in message or file scope, it applies to all
-// fields.
-message NanoPBOptions {
-  // Allocated size for 'bytes' and 'string' fields.
-  optional int32 max_size = 1;
-
-  // Allocated number of entries in arrays ('repeated' fields)
-  optional int32 max_count = 2;
-
-  // Size of integer fields. Can save some memory if you don't need
-  // full 32 bits for the value.
-  optional IntSize int_size = 7 [default = IS_DEFAULT];
-
-  // Force type of field (callback or static allocation)
-  optional FieldType type = 3 [default = FT_DEFAULT];
-
-  // Use long names for enums, i.e. EnumName_EnumValue.
-  optional bool long_names = 4 [default = true];
-
-  // Add 'packed' attribute to generated structs.
-  // Note: this cannot be used on CPUs that break on unaligned
-  // accesses to variables.
-  optional bool packed_struct = 5 [default = false];
-
-  // Skip this message
-  optional bool skip_message = 6 [default = false];
-
-  // Generate oneof fields as normal optional fields instead of union.
-  optional bool no_unions = 8 [default = false];
-
-  // integer type tag for a message
-  optional uint32 msgid = 9;
-}
-
-// Extensions to protoc 'Descriptor' type in order to define options
-// inside a .proto file.
-//
-// Protocol Buffers extension number registry
-// --------------------------------
-// Project:  Nanopb
-// Contact:  Petteri Aimonen <jpa@kapsi.fi>
-// Web site: http://kapsi.fi/~jpa/nanopb
-// Extensions: 1010 (all types)
-// --------------------------------
-
-extend google.protobuf.FileOptions {
-    optional NanoPBOptions nanopb_fileopt = 1010;
-}
-
-extend google.protobuf.MessageOptions {
-    optional NanoPBOptions nanopb_msgopt = 1010;
-}
-
-extend google.protobuf.EnumOptions {
-    optional NanoPBOptions nanopb_enumopt = 1010;
-}
-
-extend google.protobuf.FieldOptions {
-    optional NanoPBOptions nanopb = 1010;
-}
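
Note: the NanoPBOptions message above is what the nanopb generator reads off each descriptor; a hedged sketch of constructing it from Python via the deleted nanopb_pb2 bindings (illustrative values only):

    import nanopb_pb2  # the generated module deleted earlier in this change

    # Cap a bytes/string field at 40 bytes, store at most 8 repeated entries in
    # a static array, and force static allocation with 16-bit integers.
    options = nanopb_pb2.NanoPBOptions(
        max_size=40,
        max_count=8,
        int_size=nanopb_pb2.IS_16,
        type=nanopb_pb2.FT_STATIC,
    )
    print(options)
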
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto
deleted file mode 100644
index e627289..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto
+++ /dev/null
@@ -1,148 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc.  All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//     * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//
-// WARNING:  The plugin interface is currently EXPERIMENTAL and is subject to
-//   change.
-//
-// protoc (aka the Protocol Compiler) can be extended via plugins.  A plugin is
-// just a program that reads a CodeGeneratorRequest from stdin and writes a
-// CodeGeneratorResponse to stdout.
-//
-// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
-// of dealing with the raw protocol defined here.
-//
-// A plugin executable needs only to be placed somewhere in the path.  The
-// plugin should be named "protoc-gen-$NAME", and will then be used when the
-// flag "--${NAME}_out" is passed to protoc.
-
-syntax = "proto2";
-package google.protobuf.compiler;
-option java_package = "com.google.protobuf.compiler";
-option java_outer_classname = "PluginProtos";
-
-import "google/protobuf/descriptor.proto";
-
-// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-message CodeGeneratorRequest {
-  // The .proto files that were explicitly listed on the command-line.  The
-  // code generator should generate code only for these files.  Each file's
-  // descriptor will be included in proto_file, below.
-  repeated string file_to_generate = 1;
-
-  // The generator parameter passed on the command-line.
-  optional string parameter = 2;
-
-  // FileDescriptorProtos for all files in files_to_generate and everything
-  // they import.  The files will appear in topological order, so each file
-  // appears before any file that imports it.
-  //
-  // protoc guarantees that all proto_files will be written after
-  // the fields above, even though this is not technically guaranteed by the
-  // protobuf wire format.  This theoretically could allow a plugin to stream
-  // in the FileDescriptorProtos and handle them one by one rather than read
-  // the entire set into memory at once.  However, as of this writing, this
-  // is not similarly optimized on protoc's end -- it will store all fields in
-  // memory at once before sending them to the plugin.
-  repeated FileDescriptorProto proto_file = 15;
-}
-
-// The plugin writes an encoded CodeGeneratorResponse to stdout.
-message CodeGeneratorResponse {
-  // Error message.  If non-empty, code generation failed.  The plugin process
-  // should exit with status code zero even if it reports an error in this way.
-  //
-  // This should be used to indicate errors in .proto files which prevent the
-  // code generator from generating correct code.  Errors which indicate a
-  // problem in protoc itself -- such as the input CodeGeneratorRequest being
-  // unparseable -- should be reported by writing a message to stderr and
-  // exiting with a non-zero status code.
-  optional string error = 1;
-
-  // Represents a single generated file.
-  message File {
-    // The file name, relative to the output directory.  The name must not
-    // contain "." or ".." components and must be relative, not be absolute (so,
-    // the file cannot lie outside the output directory).  "/" must be used as
-    // the path separator, not "\".
-    //
-    // If the name is omitted, the content will be appended to the previous
-    // file.  This allows the generator to break large files into small chunks,
-    // and allows the generated text to be streamed back to protoc so that large
-    // files need not reside completely in memory at one time.  Note that as of
-    // this writing protoc does not optimize for this -- it will read the entire
-    // CodeGeneratorResponse before writing files to disk.
-    optional string name = 1;
-
-    // If non-empty, indicates that the named file should already exist, and the
-    // content here is to be inserted into that file at a defined insertion
-    // point.  This feature allows a code generator to extend the output
-    // produced by another code generator.  The original generator may provide
-    // insertion points by placing special annotations in the file that look
-    // like:
-    //   @@protoc_insertion_point(NAME)
-    // The annotation can have arbitrary text before and after it on the line,
-    // which allows it to be placed in a comment.  NAME should be replaced with
-    // an identifier naming the point -- this is what other generators will use
-    // as the insertion_point.  Code inserted at this point will be placed
-    // immediately above the line containing the insertion point (thus multiple
-    // insertions to the same point will come out in the order they were added).
-    // The double-@ is intended to make it unlikely that the generated code
-    // could contain things that look like insertion points by accident.
-    //
-    // For example, the C++ code generator places the following line in the
-    // .pb.h files that it generates:
-    //   // @@protoc_insertion_point(namespace_scope)
-    // This line appears within the scope of the file's package namespace, but
-    // outside of any particular class.  Another plugin can then specify the
-    // insertion_point "namespace_scope" to generate additional classes or
-    // other declarations that should be placed in this scope.
-    //
-    // Note that if the line containing the insertion point begins with
-    // whitespace, the same whitespace will be added to every line of the
-    // inserted text.  This is useful for languages like Python, where
-    // indentation matters.  In these languages, the insertion point comment
-    // should be indented the same amount as any inserted code will need to be
-    // in order to work correctly in that context.
-    //
-    // The code generator that generates the initial file and the one which
-    // inserts into it must both run as part of a single invocation of protoc.
-    // Code generators are executed in the order in which they appear on the
-    // command line.
-    //
-    // If |insertion_point| is present, |name| must also be present.
-    optional string insertion_point = 2;
-
-    // The file contents.
-    optional string content = 15;
-  }
-  repeated File file = 15;
-}
diff --git a/src/antlion/controllers/buds_lib/latency.py b/src/antlion/controllers/buds_lib/latency.py
deleted file mode 100644
index 1cd9c8d..0000000
--- a/src/antlion/controllers/buds_lib/latency.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Processes profiling data to output latency numbers."""
-#
-# Type "python latency.py -h" for help
-#
-# Currently the log data is assumed to be in the following format:
-# PROF:<event-id> <timestamp>
-# The <event-id> and <timestamp> can be in the form of any valid
-# (positive) integer literal in Python
-# Examples:
-#   PROF:0x0006 0x00000155e0d043f1
-#   PROF:6 1468355593201
-
-import argparse
-from collections import defaultdict
-import csv
-import logging
-import math
-import os
-import string
-import xml.etree.ElementTree as ET
-
-valid_fname_chars = '-_.()%s%s' % (string.ascii_letters, string.digits)
-PERCENTILE_STEP = 1
-PROFILER_DATA_PREFIX = 'PROF:'
-
-
-class EventPair(object):
-
-    def __init__(self, pair_id, latency, name):
-        self.pair_id = pair_id
-        self.latency = latency
-        self.name = name
-
-
-class LatencyEntry(object):
-
-    def __init__(self, start_timestamp, latency):
-        self.start_timestamp = start_timestamp
-        self.latency = latency
-
-
-def parse_xml(xml_file):
-    """
-    Parse the configuration xml file.
-
-    Returns:
-      event_pairs_by_pair_id: dict mapping event id to event pair object
-      event_pairs_by_start_id: dict mapping starting event to list of event pairs
-                               with that starting event.
-      event_pairs_by_end_id: dict mapping ending event to list of event pairs
-                             with that ending event.
-    """
-    root = ET.parse(xml_file).getroot()
-    event_pairs = root.findall('event-pair')
-    event_pairs_by_pair_id = {}
-    event_pairs_by_start_id = defaultdict(list)
-    event_pairs_by_end_id = defaultdict(list)
-
-    for event_pair in event_pairs:
-        start_evt = root.find(
-            "./event[@id='{0:}']".format(event_pair.attrib['start-event']))
-        end_evt = root.find(
-            "./event[@id='{0:}']".format(event_pair.attrib['end-event']))
-        start = int(start_evt.attrib['id'], 0)
-        end = int(end_evt.attrib['id'], 0)
-        pair_id = start << 32 | end
-        if pair_id in event_pairs_by_pair_id:
-            logging.error('Latency event repeated: start id = %d, end id = %d',
-                          start,
-                          end)
-            continue
-        # Create the output file name base by concatenating:
-        # "input file name base" + start event name + "_to_" + end event name
-        evt_pair_name = start_evt.attrib['name'] + '_to_' + end_evt.attrib[
-            'name']
-        evt_pair_name = [
-            c if c in valid_fname_chars else '_' for c in evt_pair_name
-        ]
-        evt_pair_name = ''.join(evt_pair_name)
-        evt_list = EventPair(pair_id, 0, evt_pair_name)
-        event_pairs_by_pair_id[pair_id] = evt_list
-        event_pairs_by_start_id[start].append(evt_list)
-        event_pairs_by_end_id[end].append(evt_list)
-    return (event_pairs_by_pair_id, event_pairs_by_start_id,
-            event_pairs_by_end_id)
-
-
-def percentile_to_index(num_entries, percentile):
-    """
-    Returns the index in an array corresponding to a percentile.
-
-    Arguments:
-      num_entries: the number of entries in the array.
-      percentile: which percentile to calculate the index for.
-    Returns:
-      ind: the index in the array corresponding to the percentile.
-    """
-    ind = int(math.floor(float(num_entries) * percentile / 100))
-    if ind > 0:
-        ind -= 1
-    return ind
-
-
-def compute_latencies(input_file, event_pairs_by_start_id,
-                      event_pairs_by_end_id):
-    """Parse the input data file and compute latencies."""
-    line_num = 0
-    lat_tables_by_pair_id = defaultdict(list)
-    while True:
-        line_num += 1
-        line = input_file.readline()
-        if not line:
-            break
-        data = line.partition(PROFILER_DATA_PREFIX)[2]
-        if not data:
-            continue
-        try:
-            event_id, timestamp = [int(x, 0) for x in data.split()]
-        except ValueError:
-            logging.error('Badly formed event entry at line #%s: %s', line_num,
-                          line)
-            continue
-        # We use event_pair.latency to temporarily store the timestamp
-        # of the start event
-        for event_pair in event_pairs_by_start_id[event_id]:
-            event_pair.latency = timestamp
-        for event_pair in event_pairs_by_end_id[event_id]:
-            # compute the latency only if we have seen the corresponding
-            # start event already
-            if event_pair.latency:
-                lat_tables_by_pair_id[event_pair.pair_id].append(
-                    LatencyEntry(event_pair.latency,
-                                 timestamp - event_pair.latency))
-                event_pair.latency = 0
-    return lat_tables_by_pair_id
-
-
-def write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_pair = event_pairs_by_pair_id[event_id]
-        with open(fname_base + '_' + event_pair.name + '_data.csv',
-                  'w', newline='') as out_file:
-            csv_writer = csv.writer(out_file)
-            for dat in lat_table:
-                csv_writer.writerow([dat.start_timestamp, dat.latency])
-
-
-def write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
-    summaries = get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_pair = event_pairs_by_pair_id[event_id]
-        summary = summaries[event_pair.name]
-        latencies = summary['latencies']
-        num_latencies = summary['num_latencies']
-        with open(fname_base + '_' + event_pair.name + '_summary.txt',
-                  'w', newline='') as out_file:
-            csv_writer = csv.writer(out_file)
-            csv_writer.writerow(['Percentile', 'Latency'])
-            # Write percentile table
-            for percentile in range(1, 101):
-                ind = percentile_to_index(num_latencies, percentile)
-                csv_writer.writerow([percentile, latencies[ind]])
-
-            # Write summary
-            print('\n\nTotal number of samples = {}'.format(num_latencies),
-                  file=out_file)
-            print('Min = {}'.format(summary['min_lat']), file=out_file)
-            print('Max = {}'.format(summary['max_lat']), file=out_file)
-            print('Average = {}'.format(summary['average_lat']), file=out_file)
-            print('Median = {}'.format(summary['median_lat']), file=out_file)
-            print('90 %ile = {}'.format(summary['90pctile']), file=out_file)
-            print('95 %ile = {}'.format(summary['95pctile']), file=out_file)
-
-
-def process_latencies(config_xml, input_file):
-    """
-    End to end function to compute latencies and summaries from input file.
-    Writes latency results to files in current directory.
-
-    Arguments:
-       config_xml: xml file specifying which event pairs to compute latency
-                   btwn.
-       input_file: text file containing the timestamped events, like a log file.
-    """
-    # Parse the event configuration file
-    (event_pairs_by_pair_id, event_pairs_by_start_id,
-     event_pairs_by_end_id) = parse_xml(config_xml)
-    # Compute latencies
-    lat_tables_by_pair_id = compute_latencies(input_file,
-                                              event_pairs_by_start_id,
-                                              event_pairs_by_end_id)
-    fname_base = os.path.splitext(os.path.basename(input_file.name))[0]
-    # Write the latency data and summary to respective files
-    write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
-    write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
-
-
-def get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id):
-    """
-    Process significant summaries from a table of latencies.
-
-    Arguments:
-      event_pairs_by_pair_id: dict mapping event id to event pair object
-      lat_tables_by_pair_id: dict mapping event id to latency table
-    Returns:
-      summaries: dict mapping event pair name to significant summary metrics.
-    """
-    summaries = {}
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_summary = {}
-        event_pair = event_pairs_by_pair_id[event_id]
-        latencies = [entry.latency for entry in lat_table]
-        latencies.sort()
-        event_summary['latencies'] = latencies
-        event_summary['num_latencies'] = len(latencies)
-        event_summary['min_lat'] = latencies[0]
-        event_summary['max_lat'] = latencies[-1]
-        event_summary['average_lat'] = sum(latencies) / len(latencies)
-        event_summary['median_lat'] = latencies[len(latencies) // 2]
-        event_summary['90pctile'] = latencies[percentile_to_index(
-            len(latencies), 90)]
-        event_summary['95pctile'] = latencies[percentile_to_index(
-            len(latencies), 95)]
-        summaries[event_pair.name] = event_summary
-    return summaries
-
-
-def get_summaries_from_log(input_file_name, config_xml=None):
-    """
-    End to end function to compute latencies and summaries from input file.
-    Returns a summary dictionary.
-
-    Arguments:
-      input_file_name: text file containing the timestamped events, like a
-                       log file.
-      config_xml: xml file specifying which event pairs to compute latency btwn.
-    Returns:
-      summaries: dict mapping event pair name to significant summary metrics.
-    """
-    config_xml = config_xml or os.path.join(os.path.dirname(__file__),
-                                            'latency.xml')
-    (event_pairs_by_pair_id, event_pairs_by_start_id,
-     event_pairs_by_end_id) = parse_xml(config_xml)
-    # Compute latencies
-    input_file = open(input_file_name, 'r')
-    lat_tables_by_pair_id = compute_latencies(input_file,
-                                              event_pairs_by_start_id,
-                                              event_pairs_by_end_id)
-    return get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
-
-
-if __name__ == '__main__':
-    # Parse command-line arguments
-    parser = argparse.ArgumentParser(
-        description='Processes profiling data to output latency numbers')
-    parser.add_argument(
-        '--events-config',
-        type=argparse.FileType('r'),
-        default=os.path.join(os.path.dirname(__file__), 'latency.xml'),
-        help='The configuration XML file for events.'
-             ' If not specified uses latency.xml from current folder')
-    parser.add_argument(
-        'input', type=argparse.FileType('r'), help='The input log')
-    args = parser.parse_args()
-    process_latencies(args.events_config, args.input)
diff --git a/src/antlion/controllers/buds_lib/latency.xml b/src/antlion/controllers/buds_lib/latency.xml
deleted file mode 100644
index 320979b..0000000
--- a/src/antlion/controllers/buds_lib/latency.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0"?>
-<root>
-    <event id="1" name="Button Down" />
-    <event id="3" name="Play/Pause Button Event" />
-    <event id="4" name="A2DP Start Ind" />
-    <event id="6" name="A2DP Start Streaming" />
-    <event id="9" name="AVRCP Play Notification" />
-    <event id="10" name="AVRCP Pause Notification" />
-    <event id="12" name="Voice Cmd Btn Held" />
-    <event id="13" name="Voice Cmd Btn Released" />
-
-    <!-- Event pairs that we are interested in measuring the latency of -->
-    <event-pair start-event="1" end-event="3" />
-    <event-pair start-event="1" end-event="12" />
-    <event-pair start-event="3" end-event="9" />
-    <event-pair start-event="9" end-event="6" />
-    <event-pair start-event="1" end-event="6" />
-    <event-pair start-event="3" end-event="10" />
-    <event-pair start-event="1" end-event="10" />
-    <event-pair start-event="12" end-event="13" />
-    <event-pair start-event="13" end-event="6" />
-</root>
diff --git a/src/antlion/controllers/buds_lib/logserial.py b/src/antlion/controllers/buds_lib/logserial.py
deleted file mode 100644
index 7b71f8d..0000000
--- a/src/antlion/controllers/buds_lib/logserial.py
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import select
-import subprocess
-import sys
-import time
-import uuid
-from logging import Logger
-from threading import Thread
-
-import serial
-from serial.tools import list_ports
-
-from antlion.controllers.buds_lib import tako_trace_logger
-
-logging = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-
-RETRIES = 0
-
-
-class LogSerialException(Exception):
-    """LogSerial Exception."""
-
-
-class PortCheck(object):
-    def get_serial_ports(self):
-        """Gets the computer available serial ports.
-
-        Returns:
-            Dictionary object with all the serial port names.
-        """
-        result = {}
-        ports = list_ports.comports()
-        for port_name, description, address in ports:
-            result[port_name] = (description, address)
-        return result
-
-    # TODO: Clean up this function. The boolean logic can be simplified.
-    def search_port_by_property(self, search_params):
-        """Search ports by a dictionary of the search parameters.
-
-        Args:
-            search_params: Dictionary object with the parameters
-                           to search. i.e:
-                           {'ID_SERIAL_SHORT':'213213',
-                           'ID_USB_INTERFACE_NUM': '01'}
-        Returns:
-            Array with the ports found
-        """
-        ports_result = []
-        for port in self.get_serial_ports():
-            properties = self.get_port_properties(port=port)
-            if properties:
-                properties_exists = True
-                for port_property in search_params:
-                    properties_exists *= (port_property in properties)
-                properties_exists = True if properties_exists == 1 else False
-                if properties_exists:
-                    found = True
-                    for port_property in search_params.keys():
-                        search_value = search_params[port_property]
-                        if properties[port_property] == search_value:
-                            found *= True
-                        else:
-                            found = False
-                            break
-                    found = True if found == 1 else False
-                    if found:
-                        ports_result.append(port)
-        return ports_result
-
-    def get_port_properties(self, port):
-        """Get all the properties from a given port.
-
-        Args:
-            port: String object with the port name. i.e. '/dev/ttyACM1'
-
-        Returns:
-            dictionary object with all the properties.
-        """
-        ports = self.get_serial_ports()
-        if port in ports:
-            result = {}
-            port_address = ports[port][1]
-            property_list = None
-            if sys.platform.startswith('linux') or sys.platform.startswith(
-                    'cygwin'):
-                try:
-                    command = 'udevadm info -q property -n {}'.format(port)
-                    property_list = subprocess.check_output(command, shell=True)
-                    property_list = property_list.decode(errors='replace')
-                except subprocess.CalledProcessError as error:
-                    logging.error(error)
-                if property_list:
-                    properties = filter(None, property_list.split('\n'))
-                    for prop in properties:
-                        p = prop.split('=')
-                        result[p[0]] = p[1]
-            elif sys.platform.startswith('win'):
-                regex = (r'(?P<type>[A-Z]*)\sVID\:PID\=(?P<vid>\w*)'
-                         r'\:(?P<pid>\w*)\s+(?P<adprop>.*$)')
-                m = re.search(regex, port_address)
-                if m:
-                    result['type'] = m.group('type')
-                    result['vid'] = m.group('vid')
-                    result['pid'] = m.group('pid')
-                    adprop = m.group('adprop').strip()
-                    if adprop:
-                        prop_array = adprop.split(' ')
-                        for prop in prop_array:
-                            p = prop.split('=')
-                            result[p[0]] = p[1]
-                    if 'LOCATION' in result:
-                        interface = int(result['LOCATION'].split('.')[1])
-                        if interface < 10:
-                            result['ID_USB_INTERFACE_NUM'] = '0{}'.format(
-                                interface)
-                        else:
-                            result['ID_USB_INTERFACE_NUM'] = '{}'.format(
-                                interface)
-                    win_vid_pid = '*VID_{}*PID_{}*'.format(result['vid'],
-                                                           result['pid'])
-                    command = (
-                            'powershell gwmi "Win32_USBControllerDevice |' +
-                            ' %{[wmi]($_.Dependent)} |' +
-                            ' Where-Object -Property PNPDeviceID -Like "' +
-                            win_vid_pid + '" |' +
-                            ' Where-Object -Property Service -Eq "usbccgp" |' +
-                            ' Select-Object -Property PNPDeviceID"')
-                    res = subprocess.check_output(command, shell=True)
-                    r = res.decode('ascii')
-                    m = re.search('USB\\\\.*', r)
-                    if m:
-                        result['ID_SERIAL_SHORT'] = (
-                            m.group().strip().split('\\')[2])
-            return result
-
-    def port_exists(self, port):
-        """Check if a serial port exists in the computer by the port name.
-
-        Args:
-            port: String object with the port name. i.e. '/dev/ttyACM1'
-
-        Returns:
-            True if it was found, False if not.
-        """
-        exists = port in self.get_serial_ports()
-        return exists
-
-
-class LogSerial(object):
-    def __init__(self,
-                 port,
-                 baudrate,
-                 bytesize=8,
-                 parity='N',
-                 stopbits=1,
-                 timeout=0.15,
-                 retries=0,
-                 flush_output=True,
-                 terminator='\n',
-                 output_path=None,
-                 serial_logger=None):
-        global RETRIES
-        self.set_log = False
-        self.output_path = None
-        self.set_output_path(output_path)
-        if serial_logger:
-            self.set_logger(serial_logger)
-        self.monitor_port = PortCheck()
-        if self.monitor_port.port_exists(port=port):
-            self.connection_handle = serial.Serial()
-            RETRIES = retries
-            self.reading = True
-            self.log = []
-            self.log_thread = Thread()
-            self.command_ini_index = None
-            self.is_logging = False
-            self.flush_output = flush_output
-            self.terminator = terminator
-            if port:
-                self.connection_handle.port = port
-            if baudrate:
-                self.connection_handle.baudrate = baudrate
-            if bytesize:
-                self.connection_handle.bytesize = bytesize
-            if parity:
-                self.connection_handle.parity = parity
-            if stopbits:
-                self.connection_handle.stopbits = stopbits
-            if timeout:
-                self.connection_handle.timeout = timeout
-            try:
-                self.open()
-            except Exception as e:
-                self.close()
-                logging.error(e)
-        else:
-            raise LogSerialException(
-                'The port {} does not exist'.format(port))
-
-    def set_logger(self, serial_logger):
-        global logging
-        logging = serial_logger
-        self.set_output_path(getattr(logging, 'output_path', '/tmp'))
-        self.set_log = True
-
-    def set_output_path(self, output_path):
-        """Set the output path for the flushed log.
-
-        Args:
-            output_path: String object with the path
-        """
-        if output_path:
-            if os.path.exists(output_path):
-                self.output_path = output_path
-            else:
-                raise LogSerialException('The output path does not exist.')
-
-    def refresh_port_connection(self, port):
-        """Will update the port connection without closing the read thread.
-
-        Args:
-            port: String object with the new port name. i.e. '/dev/ttyACM1'
-
-        Raises:
-            LogSerialException if the port is not alive.
-        """
-        if self.monitor_port.port_exists(port=port):
-            self.connection_handle.port = port
-            self.open()
-        else:
-            raise LogSerialException(
-                'The port {} does not exist'.format(port))
-
-    def is_port_alive(self):
-        """Verify if the current port is alive in the computer.
-
-        Returns:
-            True if it's alive, False if it's missing.
-        """
-        alive = self.monitor_port.port_exists(port=self.connection_handle.port)
-        return alive
-
-    def open(self):
-        """Will open the connection with the current port settings."""
-        while self.connection_handle.isOpen():
-            self.connection_handle.close()
-            time.sleep(0.5)
-        self.connection_handle.open()
-        if self.flush_output:
-            self.flush()
-        self.start_reading()
-        logging.info('Connection Open')
-
-    def close(self):
-        """Will close the connection and the read thread."""
-        self.stop_reading()
-        if self.connection_handle:
-            self.connection_handle.close()
-        if not self.set_log:
-            logging.flush_log()
-        self.flush_log()
-        logging.info('Connection Closed')
-
-    def flush(self):
-        """Will flush any input from the serial connection."""
-        self.write('\n')
-        self.connection_handle.flushInput()
-        self.connection_handle.flush()
-        flushed = 0
-        while True:
-            ready_r, _, ready_x = (select.select([self.connection_handle], [],
-                                                 [self.connection_handle], 0))
-            if ready_x:
-                logging.exception('exception from serial port')
-                return
-            elif ready_r:
-                flushed += 1
-                # This may cause underlying buffering.
-                self.connection_handle.read(1)
-                # Flush the underlying buffer too.
-                self.connection_handle.flush()
-            else:
-                break
-            if flushed > 0:
-                logging.debug('dropped >{} bytes'.format(flushed))
-
-    def write(self, command, wait_time=0.2):
-        """Will write into the serial connection.
-
-        Args:
-            command: String object with the text to write.
-            wait_time: Float object with the seconds to wait after the
-                       command was issued.
-        """
-        if command:
-            if self.terminator:
-                command += self.terminator
-            self.command_ini_index = len(self.log)
-            self.connection_handle.write(command.encode())
-            if wait_time:
-                time.sleep(wait_time)
-            logging.info('cmd [{}] sent.'.format(command.strip()))
-
-    def flush_log(self):
-        """Will output the log into a CSV file."""
-        if len(self.log) > 0:
-            path = ''
-            if not self.output_path:
-                self.output_path = os.getcwd()
-            elif not os.path.exists(self.output_path):
-                self.output_path = os.getcwd()
-            path = os.path.join(self.output_path,
-                                str(uuid.uuid4()) + '_serial.log')
-            with open(path, 'a') as log_file:
-                for info in self.log:
-                    log_file.write('{}, {}\n'.format(info[0], info[1]))
-
-    def read(self):
-        """Will read from the log the output from the serial connection
-        after a write command was issued. It will take the initial time
-        of the command as a reference.
-
-        Returns:
-            Array object with the log lines.
-        """
-        buf_read = []
-        command_end_index = len(self.log)
-        info = self.query_serial_log(self.command_ini_index, command_end_index)
-        for line in info:
-            buf_read.append(line[1])
-        self.command_ini_index = command_end_index
-        return buf_read
-
-    def get_all_log(self):
-        """Gets the log object that collects the logs.
-
-        Returns:
-            DataFrame object with all the logs.
-        """
-        return self.log
-
-    def query_serial_log(self, from_index, to_index):
-        """Will query the session log from a given time in EPOC format.
-
-        Args:
-            from_timestamp: Double value with the EPOC timestamp to start
-                            the search.
-            to_timestamp: Double value with the EPOC timestamp to finish the
-                          rearch.
-
-        Returns:
-            DataFrame with the result query.
-        """
-        if from_index < to_index:
-            info = self.log[from_index:to_index]
-            return info
-
-    def _start_reading_thread(self):
-        if self.connection_handle.isOpen():
-            self.reading = True
-            while self.reading:
-                try:
-                    data = self.connection_handle.readline().decode('utf-8')
-                    if data:
-                        self.is_logging = True
-                        data = data.replace('\n', '').replace('\r', '')
-                        data = data.strip()
-                        self.log.append([time.time(), data])
-                    else:
-                        self.is_logging = False
-                except Exception:
-                    time.sleep(1)
-            logging.info('Read thread closed')
-
-    def start_reading(self):
-        """Method to start the log collection."""
-        if not self.log_thread.isAlive():
-            self.log_thread = Thread(target=self._start_reading_thread, args=())
-            self.log_thread.daemon = True
-            try:
-                self.log_thread.start()
-            except(KeyboardInterrupt, SystemExit):
-                self.close()
-        else:
-            logging.warning('Not running log thread, is already alive')
-
-    def stop_reading(self):
-        """Method to stop the log collection."""
-        self.reading = False
-        self.log_thread.join(timeout=600)
diff --git a/src/antlion/controllers/buds_lib/tako_trace_logger.py b/src/antlion/controllers/buds_lib/tako_trace_logger.py
deleted file mode 100644
index 187cfdc..0000000
--- a/src/antlion/controllers/buds_lib/tako_trace_logger.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-
-from antlion import tracelogger
-
-
-class TakoTraceLogger(tracelogger.TraceLogger):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.d = self.debug
-        self.e = self.error
-        self.i = self.info
-        self.t = self.step
-        self.w = self.warning
-
-    def _logger_level(self, level_name):
-        level = logging.getLevelName(level_name)
-        return lambda *args, **kwargs: self._logger.log(level, *args, **kwargs)
-
-    def step(self, msg, *args, **kwargs):
-        """Delegate a step call to the underlying logger."""
-        self._log_with(self._logger_level('STEP'), 1, msg, *args, **kwargs)
-
-    def device(self, msg, *args, **kwargs):
-        """Delegate a device call to the underlying logger."""
-        self._log_with(self._logger_level('DEVICE'), 1, msg, *args, **kwargs)
-
-    def suite(self, msg, *args, **kwargs):
-        """Delegate a device call to the underlying logger."""
-        self._log_with(self._logger_level('SUITE'), 1, msg, *args, **kwargs)
-
-    def case(self, msg, *args, **kwargs):
-        """Delegate a case call to the underlying logger."""
-        self._log_with(self._logger_level('CASE'), 1, msg, *args, **kwargs)
-
-    def flush_log(self):
-        """This function exists for compatibility with Tako's logserial module.
-
-        Note that flushing the log is handled automatically by python's logging
-        module.
-        """
diff --git a/src/antlion/controllers/buds_lib/test_actions/__init__.py b/src/antlion/controllers/buds_lib/test_actions/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py b/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py
deleted file mode 100644
index 3d03cbd..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import re
-
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
-from antlion.controllers.buds_lib.test_actions.base_test_actions import timed_action
-
-PHONE_DFU_PATH = ('/storage/emulated/0/Android/data/com.google.android'
-                  '.googlequicksearchbox/files/download_cache/apollo.dfu')
-
-AGSA_BROADCAST = (
-    'am  broadcast -a \'action_ota\' --es dfu_url %s --es build_label 9.9.9 '
-    '--ez is_force %s com.google.android.googlequicksearchbox/com.google'
-    '.android.apps.gsa.broadcastreceiver.CommonBroadcastReceiver')
-
-
-class AgsaOTAError(Exception):
-    """OTA Error"""
-
-
-class AgsaTestActions(BaseTestAction):
-    """AGSA test action library."""
-
-    def __init__(self, android_dev, logger=None):
-        """
-        Simple init code to keep the android object for future reference.
-        Args:
-           android_dev: devcontrollers.android_device.AndroidDevice
-        """
-        super(AgsaTestActions, self).__init__(logger)
-        self.dut = android_dev
-
-    @timed_action
-    def _initiate_agsa_ota(self, file_path, destination=None, force=True):
-        """Pushes the dfu file to phone and issues broadcast to start AGSA OTA
-
-        Args:
-            file_path: (string) path of dfu file
-            destination: (string) destination path on the phone; uses
-                         $PHONE_DFU_PATH if not specified
-            force: (bool) whether to pass is_force=true in the broadcast
-        """
-        if not destination:
-            destination = PHONE_DFU_PATH
-        if self.dut.push_file_to_phone(file_path, destination):
-            if force:
-                force = 'true'
-            else:
-                force = 'false'
-
-            command = AGSA_BROADCAST % (destination, force)
-            output = self.dut.adb.shell(command.split())
-            if 'result=0' in output:
-                self.logger.info('Agsa broadcast successful!')
-                return True
-            else:
-                self.logger.error('Agsa broadcast failed')
-                return False
-
-    @timed_action
-    def _wait_for_ota_to_finish(self, timeout=660):
-        """Logcat is continuously read to keep track of the OTA transfer
-
-        Args:
-           timeout: (int) time to wait before timing out.
-
-        Returns:
-            True on success
-
-        Raises: AgsaOTAError if the timeout is reached.
-        """
-        # regex that confirms completion
-        transfer_completion_match = \
-            re.compile('OTA progress: 100 %|OTA img rcvd')
-        # time now + $timeout
-        expiry_time = datetime.datetime.now() + \
-                      datetime.timedelta(seconds=timeout)
-        self.logger.info('Waiting for OTA transfer to complete....')
-        while True:
-            # look back 5 seconds so recently logged lines are included
-            lookback_time = datetime.datetime.now() - \
-                            datetime.timedelta(seconds=5)
-            try:
-                # grep logcat for 'DeviceLog'
-                filtered_log = self.dut.logcat_filter_message(
-                    lookback_time.strftime('%m-%d %H:%M:%S.000'),
-                    'Devicelog:')
-                if filtered_log and \
-                        transfer_completion_match.search(filtered_log):
-                    self.logger.info('Transfer completed!')
-                    break
-            except AdbError:
-                # gets thrown if no matching string is found
-                pass
-            if datetime.datetime.now() > expiry_time:
-                self.logger.error('Timed out waiting for OTA to complete.')
-                raise AgsaOTAError('Timed out waiting for OTA to complete.')
-        return True
-
-    @timed_action
-    def initiate_agsa_and_wait_until_transfer(self, file_path, destination=None,
-                                              force=True, timeout=660):
-        """Calls _initiate_agsa_ota and _wait_for_ota_to_finish
-
-        Returns:
-            True on success and False otherwise
-        """
-        self._initiate_agsa_ota(file_path, destination, force)
-        return self._wait_for_ota_to_finish(timeout)
-
-    @timed_action
-    def install_agsa(self, version, force=False):
-        """
-        Installs the specified version of AGSA if different from the one
-        currently installed, unless force is set to True.
-
-        Args:
-            version: (string) ex: '7.14.21.release'
-            force: (bool) if True, install regardless of the currently
-                   installed version; otherwise install only when the
-                   installed version differs from the requested one.
-        Return:
-            True on Success and False otherwise
-        """
-        # get currently installed version, and install agsa only if different
-        # from what is requested
-        current_version = self.dut.get_agsa_version()
-        if (not (version.replace('alpha', '').replace('release', '')
-                 in current_version)) or force:
-            self.logger.info('Current AGSA version is %s' % current_version)
-            self.logger.info('Installing AGSA version %s...' % version)
-            if self.and_actions.install_agsa(version):
-                self.logger.info('Install success!')
-                return True
-            else:
-                self.logger.error('Failed to install version %s' % version)
-                return False
diff --git a/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py b/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py
deleted file mode 100644
index ac2fa6d..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py
+++ /dev/null
@@ -1,616 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-A comprehensive interface for performing test actions on an Apollo device.
-"""
-
-import time
-
-from antlion.controllers.android_lib.tel.tel_utils import initiate_call
-from antlion.controllers.android_lib.tel.tel_utils import wait_for_droid_in_call
-from antlion.controllers.buds_lib.apollo_lib import DeviceError
-from antlion.controllers.buds_lib.test_actions.agsa_acts import AgsaOTAError
-from antlion.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
-from antlion.controllers.buds_lib.test_actions.base_test_actions import timed_action
-from antlion.controllers.buds_lib.test_actions.bt_utils import BTUtils
-from antlion.libs.utils.timer import TimeRecorder
-from antlion.utils import wait_until
-
-PACKAGE_NAME_AGSA = 'com.google.android.googlequicksearchbox'
-PACKAGE_NAME_GMS = 'com.google.android.gms'
-PACKAGE_NAME_NEARBY = 'com.google.android.gms.policy_nearby'
-PACKAGE_NAME_SETTINGS = 'com.android.settings'
-BISTO_MP_DETECT_HEADER = 'Pixel Buds'
-BISTO_MP_DEVICE_TEXT = 'Pixel Buds'
-BISTO_MP_DETECT_TEXT = BISTO_MP_DETECT_HEADER + BISTO_MP_DEVICE_TEXT
-BISTO_MP_CANCEL_TEXT = 'CANCEL'
-BISTO_MP_CONNECT_TEXT = 'TAP TO CONNECT'
-BISTO_MP_CONNECT_FAIL_TEXT = 'Can\'t connect to'
-BISTO_MP_CONNECT_RETRY_TEXT = 'TRY AGAIN'
-BISTO_MP_CONNECTED_TEXT = 'Now set up your Google Assistant'
-BISTO_MP_CONNECTED_EXIT_TEXT = 'NO THANKS'
-BISTO_MP_EXIT_PROMPT_TEXT = 'Exit setup?'
-BISTO_MP_EXIT_CONFIRM_TEXT = 'EXIT'
-PROFILES_CONNECTED = {
-    'HFP(pri.)': 'TRUE',
-    'A2DP(pri)': 'TRUE',
-}
-PROFILES_DISCONNECTED = {
-    'HFP(pri.)': 'FALSE',
-    'A2DP(pri)': 'FALSE',
-}
-COMP_PROFILE_CONNECTED = {'Comp': 'TRUE'}
-COMP_PROFILE_DISCONNECTED = {'Comp': 'FALSE'}
-AVRCPSTATUS = 'AvrcpPlayPause'
-DEFAULT_TIMEOUT = 60  # wait 60 seconds max for bond/connect.
-DEFAULT_CMD_INTERVAL = 0.5  # default interval between serial commands
-DEFAULT_CMD_RETRY = 5  # default retry times when a command failed.
-DEFAULT_BT_PROFILES = [
-    'HFP Pri', 'HFP Sec', 'A2DP Pri', 'A2DP Sec', 'CTRL', 'AUDIO', 'DEBUG',
-    'TRANS'
-]
-DEFAULT_BT_STATUS = ['A2DP(pri)', 'HFP(pri.)', 'Comp']
-
-
-class TestActsError(Exception):
-    """Exception from Apollo Acts Error."""
-
-
-class ApolloTestActions(BaseTestAction):
-    """Test action class for all Apollo test actions."""
-
-    def __init__(self, apollo_dev, logger=None):
-        """
-        Args:
-             apollo_dev: apollo.lib.apollo_lib.Device the Apollo device
-        """
-        super(ApolloTestActions, self).__init__(logger)
-        self.dut = apollo_dev
-        # need an embedded timer for connection time measurements.
-        self.measurement_timer = TimeRecorder()
-
-    def bluetooth_get_status(self):
-        status = self.dut.get_bt_status()
-        self.logger.info(status)
-
-    def wait_for_bluetooth_disconnection(self, timeout=60):
-        """ Set pairing mode and disconnect.
-
-        This action will wait until the apollo profiles are false.
-
-        Args:
-             timeout: integer, timeout value in seconds.
-        """
-        result = True
-        apollo_status = self.dut.get_bt_status()
-        self.logger.info('Waiting for the disconnection.')
-        time.sleep(1)
-        ini_time = time.time()
-        while len(apollo_status) != len(
-            [s for s in apollo_status.values() if s == 'FALSE']):
-            apollo_status = self.dut.get_bt_status()
-            if (time.time() - ini_time) > timeout:
-                self.logger.warning('Timeout waiting for the disconnection.')
-                return False
-            time.sleep(1)
-        return result
-
-    def pair(self, phone, companion_app=True):
-        """Pairs phone with apollo and validates bluetooth profiles.
-
-        Args:
-            phone: android phone to pair with the Apollo device (self.dut)
-            companion_app (optional): True if the phone has a companion app
-                                      installed. False otherwise.
-
-        Raises:
-            TestActsError: Bluetooth pairing failed/ Dut BT status check failed.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        if bt_util.android_device_in_connected_state(phone, target_addr):
-            self.logger.info('Already paired and connected, skipping pairing.')
-        else:
-            if bt_util.android_device_in_paired_state(phone, target_addr):
-                self.logger.info(
-                    'Device is paired but not connected, unpair first.')
-                if not bt_util.bt_unpair(phone, self.dut):
-                    raise TestActsError('Unable to unpair the device')
-            bt_util.bt_pair_and_connect(phone, self.dut)
-            self.logger.info('DEVICE PAIRED')
-            if companion_app:
-                profiles = PROFILES_CONNECTED.copy()
-                profiles.update(COMP_PROFILE_CONNECTED)
-            else:
-                profiles = PROFILES_CONNECTED
-            self.logger.info(profiles)
-            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
-                raise TestActsError('Dut BT status check failed.')
-            else:
-                return True
-
-    def unpair(self, phone, companion_app=True, factory_reset_dut=True):
-        """Unpairs phone from apollo and validates bluetooth profiles.
-
-        Args:
-            phone: android phone to unpair from the Apollo device (self.dut)
-            companion_app (optional): True if the phone has a companion app
-                                      installed. False otherwise.
-
-        Raises:
-            TestActsError: Bluetooth unpairing/Dut BT status check failed.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        if not bt_util.android_device_in_paired_state(phone, target_addr):
-            self.logger.info('Device is already unpaired, skipping unpairing.')
-        else:
-            result = bt_util.bt_unpair(
-                phone, self.dut, factory_reset_dut=factory_reset_dut)
-            if not result:
-                raise TestActsError('Bluetooth unpairing failed.')
-            if companion_app:
-                profiles = PROFILES_DISCONNECTED.copy()
-                profiles.update(COMP_PROFILE_DISCONNECTED)
-            else:
-                profiles = PROFILES_DISCONNECTED
-            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
-                raise TestActsError('Dut BT status check failed.')
-            else:
-                return True
-
-    def is_paired(self, phone):
-        """Check if the given apollo is paired with the android device.
-
-        Args:
-            phone: android phone to check against the Apollo device (self.dut)
-
-        Returns:
-            Bool: True if apollo is paired with the phone.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        return bt_util.android_device_in_paired_state(phone, target_addr)
-
-    def send_music_play_event_and_validate(self):
-        """Send the play event on Apollo and validate the response and DSP
-        Status.
-
-        Raises:
-            TestActsError: Error while playing the music.
-        """
-        play_detection_timeout = 1
-        if self.dut.is_streaming():
-            self.logger.info('Music already streaming. Skipping play event..')
-            return
-        self.logger.info('Playing video...')
-        is_played = self.dut.music_control_events(
-            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PLAY_REGEX)
-        if not is_played:
-            self.logger.error('AVRCP Played status not found')
-            raise TestActsError('AVRCP Played status not found.')
-        wait_until(
-            lambda: self.dut.is_streaming(),
-            play_detection_timeout,
-            sleep_s=0.25)
-        if not self.dut.is_streaming():
-            self.logger.error('Device is NOT in a deviceA2DPStreaming state')
-            raise TestActsError(
-                'Device is NOT in a deviceA2DPStreaming state.')
-
-    def send_music_pause_event_and_validate(self):
-        """Send the pause event on Apollo and validate the responses and DSP
-        Status.
-
-        Raises:
-            TestActsError: Error while pausing the music.
-        """
-        paused_detection_timeout = 10
-        if not self.dut.is_streaming():
-            self.logger.info('Music not streaming. Skipping pause event..')
-            return
-        self.logger.info("Pausing video...")
-        is_paused = self.dut.music_control_events(
-            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PAUSE_REGEX)
-        if not is_paused:
-            self.logger.error('AVRCP Paused status not found')
-            raise TestActsError('AVRCP Paused status not found.')
-        wait_until(
-            lambda: not self.dut.is_streaming(),
-            paused_detection_timeout,
-            sleep_s=0.25)
-        if self.dut.is_streaming():
-            self.logger.error('Device is still in deviceA2DPStreaming state')
-            raise TestActsError(
-                'Device is still in deviceA2DPStreaming state.')
-
-    def vol_down_and_validate(self):
-        """Send volume down twice and validate by comparing two levels
-
-        Raises:
-            TestActsError: Error
-        """
-        self.logger.info('Decreasing volume')
-        before_vol = self.dut.volume('Down', 1)
-        time.sleep(2)
-        after_vol = self.dut.volume('Down', 1)
-        if not after_vol or not before_vol or after_vol >= before_vol:
-            self.logger.error(
-                'Unable to decrease the volume. Before: %s. After: %s' %
-                (before_vol, after_vol))
-            raise TestActsError('error decreasing volume')
-
-    def vol_up_and_validate(self):
-        """Send volume up twice and validate by comparing two levels
-
-        Raises:
-            TestActsError: Error
-        """
-        self.logger.info('Increasing volume')
-        before_vol = self.dut.volume('Up', 1)
-        time.sleep(2)
-        after_vol = self.dut.volume('Up', 1)
-        if not after_vol or not before_vol or after_vol <= before_vol:
-            self.logger.error(
-                'Unable to increase the volume. Before: %s. After: %s' %
-                (before_vol, after_vol))
-            raise TestActsError('error increasing volume')
-
-    def call_and_validate_ringing(self,
-                                  calling_phone,
-                                  number_to_call,
-                                  call_retries=10):
-        for i in range(call_retries):
-            initiate_call(self.logger, calling_phone, number_to_call)
-            is_calling = wait_for_droid_in_call(
-                self.logger, calling_phone, max_time=10)
-            if is_calling:
-                self.logger.info('Call initiated!')
-                break
-            else:
-                self.logger.warning('Call is not initiating.')
-                if i == call_retries - 1:
-                    self.logger.error('Call initiation retries exhausted')
-                    raise TestActsError(
-                        '%s retries failed to initiate the call' %
-                        (call_retries))
-            self.logger.warning('Retrying call...')
-        # wait for offhook state and return
-        wait_until(
-            (lambda: calling_phone.droid.telecomGetCallState() == 'OFFHOOK'),
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        self.logger.info('Phone call initiated on %s' % calling_phone.serial)
-
-    def answer_phone_and_validate_call_received(self, receiving_phone):
-        # wait until the phone rings (assumes that a call is initiated prior to
-        # running the command)
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'RINGING',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        self.logger.info('Ring detected on %s - now answering the call...' %
-                         (receiving_phone.serial))
-        # answer the phone call
-        self.dut.tap()
-        # wait until OFFHOOK state
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-
-    def hangup_phone_and_validate_call_hung(self, receiving_phone):
-        # wait for phone to be in OFFHOOK state (assumed that a call is answered
-        # and engaged)
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        # end the call (post- and pre-1663 have different ways of ending a call)
-        self.logger.info(
-            'Hanging up the call on %s...' % receiving_phone.serial)
-        if self.dut.version < 1663:
-            self.dut.tap()
-        else:
-            self.dut.hold(duration=100)
-        # wait for idle state
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'IDLE',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-
-    @timed_action
-    def factory_reset(self):
-        ret = False
-        try:
-            self.dut.factory_reset()
-            ret = True
-        except DeviceError as ex:
-            self.logger.warning('Failed to reset Apollo: %s' % ex)
-        return ret
-
-    @timed_action
-    def wait_for_magic_pairing_notification(self, android_act, timeout=60):
-        dut_detected = False
-        start_time = time.time()
-        self.logger.info('Waiting for MP prompt: %s' % BISTO_MP_DEVICE_TEXT)
-        while not dut_detected:
-            android_act.dut.ui_util.uia.wait.update()
-            self.sleep(1)
-            if android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_DETECT_HEADER, enabled=True).exists:
-                if android_act.dut.ui_util.uia(
-                        textContains=BISTO_MP_DEVICE_TEXT,
-                        enabled=True).exists:
-                    self.logger.info('DUT Apollo MP prompt detected!')
-                    dut_detected = True
-                else:
-                    self.logger.info(
-                        'Non-DUT Apollo MP prompt detected! Cancelling and retrying!'
-                    )
-                    android_act.dut.ui_util.click_by_text(BISTO_MP_CANCEL_TEXT)
-            if time.time() - start_time > timeout:
-                break
-        if not dut_detected:
-            self.logger.info(
-                'Failed to get %s MP prompt' % BISTO_MP_DEVICE_TEXT)
-        return dut_detected
-
-    @timed_action
-    def start_magic_pairing(self, android_act, timeout=30, retries=3):
-        paired = False
-        android_act.dut.ui_util.click_by_text(
-            BISTO_MP_CONNECT_TEXT, timeout=timeout)
-        connect_start_time = time.time()
-        count = 0
-        timeout = 30
-
-        while not paired and count < retries:
-            android_act.dut.ui_util.uia.wait.update()
-            self.sleep(1)
-            if time.time() - connect_start_time > timeout:
-                self.logger.info(
-                    'Magic pairing timed out after %s seconds.' % timeout)
-                android_act.app_force_close_agsa()
-                break
-            if android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_CONNECT_FAIL_TEXT,
-                    enabled=True).exists:
-                count += 1
-                self.logger.info('MP FAILED! Retry %s.' % count)
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_CONNECT_RETRY_TEXT)
-                connect_start_time = time.time()
-            elif android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_CONNECTED_TEXT, enabled=True).exists:
-                self.logger.info('MP SUCCESSFUL! Exiting AGSA...')
-                paired = True
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_CONNECTED_EXIT_TEXT)
-                android_act.dut.ui_util.wait_for_text(
-                    BISTO_MP_EXIT_PROMPT_TEXT)
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_EXIT_CONFIRM_TEXT)
-        return paired
-
-    @timed_action
-    def turn_bluetooth_on(self):
-        self.dut.cmd('pow 1')
-        return True
-
-    @timed_action
-    def turn_bluetooth_off(self):
-        self.dut.cmd('pow 0')
-        return True
-
-    @timed_action
-    def wait_for_bluetooth_a2dp_hfp(self,
-                                    timeout=DEFAULT_TIMEOUT,
-                                    interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT connection by checking if A2DP and HFP connected.
-
-        This is used for BT pair+connect test.
-
-        Args:
-            timeout: float, timeout value in seconds.
-            interval: float, interval between polling BT profiles.
-        """
-        # Need to check these two profiles
-        pass_profiles = ['A2DP Pri', 'HFP Pri']
-        # TODO(b/122730302): Change to just raise an error
-        ret = False
-        try:
-            ret = self._wait_for_bluetooth_profile_connection(
-                pass_profiles, timeout, interval, self.measurement_timer)
-        except DeviceError as ex:
-            self.logger.warning('Failed to wait for BT connection: %s' % ex)
-        return ret
-
-    def _wait_for_bluetooth_profile_connection(self, profiles_to_check,
-                                               timeout, interval, timer):
-        """A generic method to wait for specified BT profile connection.
-
-        Args:
-            profiles_to_check: list, profile names (A2DP, HFP, etc.) to be
-                               checked.
-            timeout: float, timeout value in seconds.
-            interval: float, interval between polling BT profiles.
-            timer: TimeRecorder, time recorder to save the connection time.
-
-        Returns:
-            bool, True if checked profiles are connected, False otherwise.
-        """
-        timer.start_timer(profiles_to_check, force=True)
-        start_time = time.time()
-        while time.time() - start_time < timeout:
-            profiles = self._bluetooth_check_profile_connection()
-            for profile in profiles:
-                if profiles[profile]:
-                    timer.stop_timer(profile)
-            # now check if the specified profile connected.
-            all_connected = True
-            for profile in profiles_to_check:
-                if not profiles[profile]:
-                    all_connected = False
-                    break
-            if all_connected:
-                return True
-            time.sleep(interval)
-        # Make sure the profile timers are stopped.
-        timer.stop_timer(profiles_to_check)
-        return False
-
-    def _bluetooth_check_profile_connection(self):
-        """Return profile connection in a boolean dict.
-
-        key=<profile name>, val = T/F
-        """
-        profiles = dict()
-        output = self.dut.get_conn_devices()
-        # need to strip all whitespaces.
-        conn_devs = {}
-
-        for key in output:
-            conn_devs[key.strip()] = output[key].strip()
-        for key in conn_devs:
-            self.logger.info('%s:%s' % (key, conn_devs[key]))
-            if 'XXXXXXXX' in conn_devs[key]:
-                profiles[key] = conn_devs[key]
-            else:
-                profiles[key] = False
-        return profiles
-
-    @timed_action
-    def wait_for_bluetooth_status_connection_all(
-            self, timeout=DEFAULT_TIMEOUT, interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT connection by checking if A2DP, HFP and COMP connected.
-
-        This is used for BT reconnect test.
-
-        Args:
-            timeout: float, timeout value in seconds.
-            interval: float, interval between polling BT profiles.
-        """
-        ret = False
-        self.measurement_timer.start_timer(DEFAULT_BT_STATUS, force=True)
-        # Mark all profiles as not connected by default.
-        connected_status = {key: False for key in DEFAULT_BT_STATUS}
-        start_time = time.time()
-        while time.time() < start_time + timeout:
-            try:
-                time.sleep(interval)
-                status = self.dut.get_bt_status()
-                for key in DEFAULT_BT_STATUS:
-                    if (not connected_status[key] and key in status
-                            and 'TRUE' == status[key]):
-                        self.measurement_timer.stop_timer(key)
-                        connected_status[key] = True
-                        self.logger.info(
-                            'BT status %s connected at %fs.' %
-                            (key, self.measurement_timer.elapsed(key)))
-                if False not in connected_status.values():
-                    ret = True
-                    break
-            except DeviceError as ex:
-                self.logger.warning(
-                    'Device exception when waiting for reconnection: %s' % ex)
-        self.measurement_timer.stop_timer(DEFAULT_BT_STATUS)
-        return ret
-
-    def initiate_ota_via_agsa_verify_transfer_completion_in_logcat(
-            self,
-            agsa_action,
-            dfu_path,
-            destination=None,
-            force=True,
-            apply_image=True,
-            reconnect=True):
-        """
-        Starts an OTA by issuing an intent to AGSA after copying the dfu file to
-        the appropriate location on the phone
-
-        Args:
-            agsa_action: projects.agsa.lib.test_actions.agsa_acts
-                         .AgsaTestActions
-            dfu_path: string - absolute path of dfu file
-            destination: string - absolute path of file on phone if not
-                         specified will use
-                         /storage/emulated/0/Android/data/com.google.android
-                         .googlequicksearchbox/files/download_cache/apollo.dfu
-            force: value set in the intent sent to AGSA
-            True if success False otherwise
-        """
-        try:
-            agsa_action.initiate_agsa_and_wait_until_transfer(
-                dfu_path, destination=destination, force=force)
-            if apply_image:
-                # Put the device in its case to apply the image.
-                self.dut.set_in_case(reconnect=reconnect)
-        except AgsaOTAError as ex:
-            self.logger.error('Failed to OTA via AGSA %s' % ex)
-            return False
-        except DeviceError as ex:
-            self.logger.error('Failed to bring up device %s' % ex)
-            return False
-        return True
-
-    @timed_action
-    def wait_for_bluetooth_a2dp_hfp_rfcomm_connect(
-            self, address, timeout=DEFAULT_TIMEOUT,
-            interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT reconnection by checking if A2DP, HFP and COMP connected
-        to the specified address.
-
-        This is used for BT connection switch test.
-
-        Args:
-            address: str, MAC address of the device to connect to.
-            timeout: float, timeout value in seconds.
-            interval: float, interval between polling BT profiles.
-
-        Returns:
-            True if the specified address is connected. False otherwise.
-        """
-        last_4_hex = address.replace(':', '')[-4:].lower()
-        profiles_to_check = ['HFP Pri', 'A2DP Pri', 'CTRL', 'AUDIO']
-        self.measurement_timer.start_timer(profiles_to_check, force=True)
-        end_time = time.time() + timeout
-        all_connected = True
-        while time.time() < end_time:
-            all_connected = True
-            profiles = self._bluetooth_check_profile_connection()
-            for profile in profiles_to_check:
-                if (profile in profiles and profiles[profile]
-                        and last_4_hex in profiles[profile].lower()):
-                    self.measurement_timer.stop_timer(profile)
-                else:
-                    all_connected = False
-            if all_connected:
-                break
-            time.sleep(interval)
-        # Make sure the profile timers are stopped.
-        self.measurement_timer.stop_timer(profiles_to_check)
-
-        return all_connected
diff --git a/src/antlion/controllers/buds_lib/test_actions/audio_utils.py b/src/antlion/controllers/buds_lib/test_actions/audio_utils.py
deleted file mode 100644
index fdc4bfa..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/audio_utils.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A generic library for audio related test actions"""
-
-import datetime
-import time
-
-from antlion import utils
-from antlion.controllers.buds_lib import tako_trace_logger
-
-
-class AudioUtilsError(Exception):
-    """Generic AudioUtils Error."""
-
-
-class AudioUtils(object):
-    """A utility that manages generic audio interactions and actions on one or
-    more devices under test.
-
-    To be maintained such that it is compatible with any devices that pair with
-    a phone.
-    """
-
-    def __init__(self):
-        self.logger = tako_trace_logger.TakoTraceLogger()
-
-    def play_audio_into_device(self, audio_file_path, audio_player, dut):
-        """Open mic on DUT, play audio into DUT, close mic on DUT.
-
-        Args:
-            audio_file_path: the path to the audio file to play, relative to the
-                           audio_player
-            audio_player: the device from which to play the audio file
-            dut: the device with the microphone
-
-        Returns:
-            bool: result of opening and closing DUT mic
-        """
-
-        if not dut.open_mic():
-            self.logger.error('DUT open_mic did not return True')
-            return False
-        audio_player.play(audio_file_path)
-        if not dut.close_mic():
-            self.logger.error('DUT close_mic did not return True.')
-            return False
-        return True
-
-    def get_agsa_interpretation_of_audio_file(self, audio_file_path,
-                                              target_interpretation,
-                                              audio_player, dut,
-                                              android_device):
-        """Gets AGSA interpretation from playing audio into DUT.
-
-        **IMPORTANT**: AGSA on android device must be connected to DUT and able
-        to receive info from DUT mic.
-
-        Args:
-          audio_file_path: the path to the audio file to play, relative to the
-                           audio_player
-          target_interpretation: what agsa interpretation should be
-          audio_player: the device from which to play the audio file
-          dut: the device with the microphone
-          android_device: android device to which dut is connected
-
-        Returns:
-          interpretation: agsa interpretation of audio file
-          score: similarity score between interpretation and target
-                 interpretation
-        """
-
-        play_start_time = datetime.datetime.now()
-        interpretation, score = '', 0.0
-        if self.play_audio_into_device(audio_file_path=audio_file_path,
-                                       audio_player=audio_player,
-                                       dut=dut):
-            time.sleep(1)
-            interpretation = android_device.agsa_interpretation(
-                cutoff_time=play_start_time,
-                target_interpretation=target_interpretation,
-                source='bisto')
-            score = utils.string_similarity(target_interpretation,
-                                            interpretation)
-
-        return interpretation, score
diff --git a/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py b/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py
deleted file mode 100644
index 518f9c6..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Base test action class, provide a base class for representing a collection of
-test actions.
-"""
-
-import datetime
-import inspect
-import time
-
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.libs.utils.timer import TimeRecorder
-
-# All methods start with "_" are considered hidden.
-DEFAULT_HIDDEN_ACTION_PREFIX = '_'
-
-
-def timed_action(method):
-    """A common decorator for test actions."""
-
-    def timed(self, *args, **kw):
-        """Log the enter/exit/time of the action method."""
-        func_name = self._convert_default_action_name(method.__name__)
-        if not func_name:
-            func_name = method.__name__
-        self.log_step('%s...' % func_name)
-        self.timer.start_timer(func_name, True)
-        result = method(self, *args, **kw)
-        # TODO: The collected method run time can be used for automatic KPI checks.
-        self.timer.stop_timer(func_name)
-        return result
-
-    return timed
-
-
-class TestActionNotFoundError(Exception):
-    pass
-
-
-class BaseTestAction(object):
-    """Class for organizing a collection of test actions.
-
-    Test actions are just normal python methods, and should perform a specified
-    action. The @timed_action decorator logs the entry/exit of the test action
-    and its execution time.
-
-    The BaseTestAction class also provides a mapping between human friendly
-    names and test action methods in order to support configuration-based
-    execution. By default, all methods that are not hidden (do not start with
-    "_") are exported as human-friendly names by replacing "_" with spaces.
-
-    Test action methods can be called directly, or via the
-    _perform_action(<human friendly name>, <args...>)
-    method.
-    """
-
-    @classmethod
-    def _fill_default_action_map(cls):
-        """Parse current class and get all test actions methods."""
-        # a <human readable name>:<method name> map.
-        cls._action_map = dict()
-        for name, _ in inspect.getmembers(cls, inspect.isroutine):
-            act_name = cls._convert_default_action_name(name)
-            if act_name:
-                cls._action_map[act_name] = name
-
-    @classmethod
-    def _convert_default_action_name(cls, func_name):
-        """Default conversion between method name -> human readable action name.
-        """
-        if not func_name.startswith(DEFAULT_HIDDEN_ACTION_PREFIX):
-            act_name = func_name.lower()
-            act_name = act_name.replace('_', ' ')
-            act_name = act_name.title()
-            return act_name.strip()
-        else:
-            return ''
-
-    @classmethod
-    def _add_action_alias(cls, default_act_name, alias):
-        """Add an alias to an existing test action."""
-        if default_act_name in cls._action_map:
-            cls._action_map[alias] = cls._action_map[default_act_name]
-            return True
-        else:
-            return False
-
-    @classmethod
-    def _get_action_names(cls):
-        if not hasattr(cls, '_action_map'):
-            cls._fill_default_action_map()
-        return cls._action_map.keys()
-
-    @classmethod
-    def get_current_time_logcat_format(cls):
-        return datetime.datetime.now().strftime('%m-%d %H:%M:%S.000')
-
-    @classmethod
-    def _action_exists(cls, action_name):
-        """Verify if an human friendly action name exists or not."""
-        if not hasattr(cls, '_action_map'):
-            cls._fill_default_action_map()
-        return action_name in cls._action_map
-
-    @classmethod
-    def _validate_actions(cls, action_list):
-        """Verify if an human friendly action name exists or not.
-
-        Args:
-          action_list: list of actions to be validated.
-
-        Returns:
-          tuple of (is valid, list of invalid/non-existent actions)
-        """
-        not_found = []
-        for action_name in action_list:
-            if not cls._action_exists(action_name):
-                not_found.append(action_name)
-        all_valid = False if not_found else True
-        return all_valid, not_found
-
-    def __init__(self, logger=None):
-        if logger is None:
-            self.logger = tako_trace_logger.TakoTraceLogger()
-            self.log_step = self.logger.step
-        else:
-            self.logger = logger
-            self.log_step = self.logger.info
-        self.timer = TimeRecorder()
-        self._fill_default_action_map()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        pass
-
-    def _perform_action(self, action_name, *args, **kwargs):
-        """Perform the specified human readable action."""
-        if action_name not in self._action_map:
-            raise TestActionNotFoundError('Action %s not found in this class.'
-                                          % action_name)
-
-        method = self._action_map[action_name]
-        ret = getattr(self, method)(*args, **kwargs)
-        return ret
-
-    @timed_action
-    def print_actions(self):
-        """Example action methods.
-
-        All test action method must:
-            1. return a value. False means action failed, any other value means
-               pass.
-            2. should not start with "_". Methods start with "_" is hidden.
-        All test action method may:
-            1. have optional arguments. Mutable argument can be used to pass
-               value
-            2. raise exceptions. Test case class is expected to handle
-               exceptions
-        """
-        num_acts = len(self._action_map)
-
-        self.logger.info('I can do %d action%s:' %
-                         (num_acts, 's' if num_acts != 1 else ''))
-        for act in self._action_map.keys():
-            self.logger.info(' - %s' % act)
-        return True
-
-    @timed_action
-    def sleep(self, seconds):
-        self.logger.info('%s seconds' % seconds)
-        time.sleep(seconds)
-
-
-if __name__ == '__main__':
-    acts = BaseTestAction()
-    acts.print_actions()
-    acts._perform_action('print actions')
-    print(acts._get_action_names())
diff --git a/src/antlion/controllers/buds_lib/test_actions/bt_utils.py b/src/antlion/controllers/buds_lib/test_actions/bt_utils.py
deleted file mode 100644
index a650e5f..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/bt_utils.py
+++ /dev/null
@@ -1,294 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# TODO: Decide in the future whether to move this to a common directory rather
-# than the one specific to apollo.
-# TODO: The move is contingent on understanding the functions that should be
-# supported by the dut device (sec_device).
-
-"""A generic library with bluetooth related functions. The connection is assumed
-to be between and android phone with any dut (referred to as secondary device)
-device that supports the following calls:
-        sec_device.turn_on_bluetooth()
-        sec_device.is_bt_enabled():
-        sec_device.bluetooth_address
-        sec_device.set_pairing_mode()
-        sec_device.factory_reset()
-
-"""
-import queue
-import time
-from logging import Logger
-
-from antlion import asserts
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.utils import TimeoutError
-from antlion.utils import wait_until
-
-# Add connection profile for future devices in this dictionary
-WEARABLE_BT_PROTOCOLS = {
-    'rio': {
-        'Comp. App': 'FALSE',
-        'HFP (pri.)': 'FALSE',
-        'HFP (sec.)': 'FALSE',
-        'A2DP (pri.)': 'FALSE',
-        'A2DP (sec.)': 'FALSE',
-    },
-    'apollo': {
-        'Comp': 'FALSE',
-        'HFP(pri.)': 'FALSE',
-        'HFP(sec.)': 'FALSE',
-        'A2DP(pri)': 'FALSE',
-        'A2DP(sec)': 'FALSE',
-    }
-}
-
-
-class BTUtilsError(Exception):
-    """Generic BTUtils error"""
-
-
-class BTUtils(object):
-    """A utility that provides access to bluetooth controls.
-
-    This class is to be maintained as a generic class such that it is compatible
-    with any devices that pair with a phone.
-    """
-
-    def __init__(self):
-        self.default_timeout = 60
-        self.logger = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-
-    def bt_pair_and_connect(self, pri_device, sec_device):
-        """Pair and connect a pri_device to a sec_device.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          A tuple (success, elapsed_time): True if pair and connect succeeded,
-          False otherwise, and the time in seconds taken to execute the flow.
-        """
-
-        pair_time = self.bt_pair(pri_device, sec_device)
-        connect_result, connect_time = self.bt_connect(pri_device, sec_device)
-        return connect_result, pair_time + connect_time
-
-    def bt_pair(self, pri_device, sec_device):
-        """Pair a pri_device to a sec_device.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-            The time in seconds taken to pair.
-
-        Raises:
-            BTUtilsError: if Bluetooth could not be enabled or pairing
-                timed out.
-        """
-        start_time = time.time()
-        # Enable BT on the primary device if it's not currently ON.
-        if not pri_device.droid.bluetoothCheckState():
-            pri_device.droid.bluetoothToggleState(True)
-            try:
-                pri_device.ed.pop_event(event_name='BluetoothStateChangedOn',
-                                        timeout=10)
-            except queue.Empty:
-                raise BTUtilsError(
-                    'Failed to toggle Bluetooth on the primary device.')
-        sec_device.turn_on_bluetooth()
-        if not sec_device.is_bt_enabled():
-            raise BTUtilsError('Could not turn on Bluetooth on the secondary '
-                               'device')
-        target_addr = sec_device.bluetooth_address
-        sec_device.set_pairing_mode()
-
-        pri_device.droid.bluetoothDiscoverAndBond(target_addr)
-        # Loop until we have bonded successfully or timeout.
-        self.logger.info('Verifying devices are bonded')
-        try:
-            wait_until(lambda: self.android_device_in_paired_state(pri_device,
-                                                                   target_addr),
-                       self.default_timeout)
-        except TimeoutError as err:
-            raise BTUtilsError('bt_pair failed: {}'.format(err))
-        end_time = time.time()
-        return end_time - start_time
-
-    def bt_connect(self, pri_device, sec_device):
-        """Connect a previously paired sec_device to a pri_device.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          A tuple (success, elapsed_time): True if connect succeeded, False
-          otherwise, and the time in seconds taken to execute the flow.
-        """
-        start_time = end_time = time.time()
-        target_addr = sec_device.bluetooth_address
-        # First check that devices are bonded.
-        paired = False
-        for paired_device in pri_device.droid.bluetoothGetBondedDevices():
-            if paired_device['address'] == target_addr:
-                paired = True
-                break
-        if not paired:
-            self.logger.error('Not paired to %s', sec_device.device_name)
-            return False, 0
-
-        self.logger.info('Attempting to connect.')
-        pri_device.droid.bluetoothConnectBonded(target_addr)
-
-        self.logger.info('Verifying devices are connected')
-        wait_until(
-            lambda: self.android_device_in_connected_state(pri_device,
-                                                           target_addr),
-            self.default_timeout)
-        end_time = time.time()
-        return True, end_time - start_time
-
-    def android_device_in_paired_state(self, device, mac_address):
-        """Check device in paired list."""
-        bonded_devices = device.droid.bluetoothGetBondedDevices()
-        for d in bonded_devices:
-            if d['address'] == mac_address:
-                self.logger.info('Successfully bonded to device')
-                return True
-        return False
-
-    def android_device_in_connected_state(self, device, mac_address):
-        """Check device in connected list."""
-        connected_devices = device.droid.bluetoothGetConnectedDevices()
-        for d in connected_devices:
-            if d['address'] == mac_address:
-                self.logger.info('Successfully connected to device')
-                return True
-        return False
-
-    def bt_unpair(self, pri_device, sec_device, factory_reset_dut=True):
-        """Unpairs two Android devices using bluetooth.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          (Tuple)True: if the devices successfully unpaired.
-          Time in ms to execute the flow.
-        Raises:
-          Error: When devices fail to unpair.
-        """
-        target_address = sec_device.bluetooth_address
-        if not self.android_device_in_paired_state(pri_device, target_address):
-            self.logger.debug('Already unpaired.')
-            return True, 0
-        self.logger.debug('Unpairing from %s' % target_address)
-        start_time = end_time = time.time()
-        asserts.assert_true(
-            pri_device.droid.bluetoothUnbond(target_address),
-            'Failed to request device unpairing.')
-
-        # Check that devices have unpaired successfully.
-        self.logger.debug('Verifying devices are unpaired')
-
-        # Loop until we have unbonded successfully or timeout.
-        wait_until(
-            lambda: self.android_device_in_paired_state(pri_device,
-                                                        target_address),
-            self.default_timeout,
-            condition=False)
-
-        self.logger.info('Successfully unpaired from %s' % target_address)
-        if factory_reset_dut:
-            self.logger.info('Factory reset DUT')
-            sec_device.factory_reset()
-        end_time = time.time()
-        return True, end_time - start_time
-
-    def check_device_bt(self, device, **kwargs):
-        """Check the Bluetooth connection status from device.
-
-        Args:
-          device: a wearable device.
-          **kwargs: additional parameters
-
-        Returns:
-          True: if bt status check success, False otherwise.
-        """
-        if device.dut_type in ['rio', 'apollo']:
-            profiles = kwargs.get('profiles')
-            return self.check_dut_status(device, profiles)
-
-    def check_dut_status(self, device, profiles=None):
-        """Check the Bluetooth connection status from rio/apollo device.
-
-        Args:
-          device: rio/apollo device
-          profiles: A dict of profiles, eg. {'HFP (pri.)': 'TRUE', 'Comp. App':
-            'TRUE', 'A2DP (pri.)': 'TRUE'}
-
-        Returns:
-          True: if bt status check success, False otherwise.
-        """
-        expected = WEARABLE_BT_PROTOCOLS
-        self.logger.info(profiles)
-        for key in profiles:
-            expected[device.dut_type][key] = profiles[key]
-        try:
-            wait_until(lambda: self._compare_profile(device,
-                                                     expected[device.dut_type]),
-                       self.default_timeout)
-        except TimeoutError:
-            status = device.get_bt_status()
-            msg_fmt = self._get_formatted_output(expected[device.dut_type],
-                                                 status)
-            self.logger.error(msg_fmt)
-            return False
-        return True
-
-    def _get_formatted_output(self, expected, actual):
-        """On BT status mismatch generate formatted output string.
-
-        Args:
-          expected: Expected BT status hash.
-          actual: Actual BT status hash from Rio.
-
-        Returns:
-          Formatted mismatch string.
-
-        Raises:
-          Error: when an unexpected parameter is encountered.
-        """
-        msg = ''
-        mismatch_format = '{}: Expected {} Actual {}. '
-        if actual is None:
-            raise BTUtilsError('None is not expected.')
-        for key in expected.keys():
-            if expected[key] != actual[key]:
-                msg += mismatch_format.format(key, expected[key], actual[key])
-        return msg
-
-    def _compare_profile(self, device, expected):
-        """Compare input expected profile with actual."""
-        actual = device.get_bt_status()
-        if actual is None:
-            raise BTUtilsError('None is not expected.')
-        for key in expected.keys():
-            if expected[key] != actual[key]:
-                return False
-        return True
diff --git a/src/antlion/controllers/cellular_lib/AndroidCellularDut.py b/src/antlion/controllers/cellular_lib/AndroidCellularDut.py
deleted file mode 100644
index b9fa887..0000000
--- a/src/antlion/controllers/cellular_lib/AndroidCellularDut.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.android_lib.tel import tel_utils
-from antlion.controllers.cellular_lib import BaseCellularDut
-import os
-
-GET_BUILD_VERSION = 'getprop ro.build.version.release'
-PIXELLOGGER_CONTROL = 'am broadcast -n com.android.pixellogger/.receiver.' \
-                      'AlwaysOnLoggingReceiver -a com.android.pixellogger.' \
-                      'service.logging.LoggingService.' \
-                      'ACTION_CONFIGURE_ALWAYS_ON_LOGGING ' \
-                      '-e intent_key_enable "{}"'
-
-NETWORK_TYPE_TO_BITMASK = {
-    BaseCellularDut.PreferredNetworkType.LTE_ONLY: '01000001000000000000',
-    BaseCellularDut.PreferredNetworkType.NR_LTE: '11000001000000000000',
-    BaseCellularDut.PreferredNetworkType.WCDMA_ONLY: '00000100001110000100',
-}
-
-class AndroidCellularDut(BaseCellularDut.BaseCellularDut):
-    """ Android implementation of the cellular DUT class."""
-    def __init__(self, ad, logger):
-        """ Keeps a handler to the android device.
-
-        Args:
-           ad: Android device handler
-           logger: a handler to the logger object
-        """
-        self.ad = ad
-        self.log = logger
-        logger.info('Initializing Android DUT with baseband version {}'.format(
-            ad.adb.getprop('gsm.version.baseband')))
-
-    def toggle_airplane_mode(self, new_state=True):
-        """ Turns airplane mode on / off.
-
-        Args:
-          new_state: True if airplane mode needs to be enabled.
-        """
-        tel_utils.toggle_airplane_mode(self.log, self.ad, new_state)
-
-    def toggle_data_roaming(self, new_state=True):
-        """ Enables or disables cellular data roaming.
-
-        Args:
-          new_state: True if data roaming needs to be enabled.
-        """
-        tel_utils.toggle_cell_data_roaming(self.ad, new_state)
-
-    def get_rx_tx_power_levels(self):
-        """ Obtains Rx and Tx power levels measured from the DUT.
-
-        Returns:
-            A tuple where the first element is an array with the RSRP value
-            in each Rx chain, and the second element is the Tx power in dBm.
-            Values for invalid or disabled Rx / Tx chains are set to None.
-        """
-        return tel_utils.get_rx_tx_power_levels(self.log, self.ad)
-
-    def set_apn(self, name, apn, type='default'):
-        """ Sets the Access Point Name.
-
-        Args:
-          name: the APN name
-          apn: the APN
-          type: the APN type
-        """
-        self.ad.droid.telephonySetAPN(name, apn, type)
-
-    def set_preferred_network_type(self, type):
-        """ Sets the preferred RAT.
-
-        Args:
-          type: an instance of class PreferredNetworkType
-        """
-
-        # If the Android version is S or later, use a bit mask to set it and return.
-        version = self.ad.adb.shell(GET_BUILD_VERSION)
-        try:
-            version_in_number = int(version)
-            if version_in_number > 11:
-                set_network_cmd = 'cmd phone set-allowed-network-types-for-users '
-                set_network_cmd += NETWORK_TYPE_TO_BITMASK[type]
-                self.ad.adb.shell(set_network_cmd)
-                get_network_cmd = 'cmd phone get-allowed-network-types-for-users'
-                allowed_network = self.ad.adb.shell(get_network_cmd)
-                self.log.info('The allowed network: {}'.format(allowed_network))
-                return
-        except ValueError:
-            self.log.info('The android version is older than S, use sl4a')
-
-        if type == BaseCellularDut.PreferredNetworkType.LTE_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_LTE_ONLY
-        elif type == BaseCellularDut.PreferredNetworkType.WCDMA_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_WCDMA_ONLY
-        elif type == BaseCellularDut.PreferredNetworkType.GSM_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_GSM_ONLY
-        else:
-            raise ValueError('Invalid RAT type.')
-
-        if not self.ad.droid.telephonySetPreferredNetworkTypesForSubscription(
-                formatted_type, self.ad.droid.subscriptionGetDefaultSubId()):
-            self.log.error("Could not set preferred network type.")
-        else:
-            self.log.info("Preferred network type set.")
-
-    def get_telephony_signal_strength(self):
-        """ Wrapper for the method with the same name in tel_utils.
-
-        Will be deprecated and replaced by get_rx_tx_power_levels. """
-        tel_utils.get_telephony_signal_strength(self.ad)
-
-    def start_modem_logging(self):
-        """ Starts on-device log collection. """
-        self.ad.adb.shell('rm /data/vendor/slog/*.* -f')
-        self.ad.adb.shell(PIXELLOGGER_CONTROL.format('true'))
-
-    def stop_modem_logging(self):
-        """ Stops log collection and pulls logs. """
-        output_path = self.ad.device_log_path + '/modem/'
-        os.makedirs(output_path, exist_ok=True)
-        self.ad.adb.shell(PIXELLOGGER_CONTROL.format('false'))
diff --git a/src/antlion/controllers/cellular_lib/BaseCellConfig.py b/src/antlion/controllers/cellular_lib/BaseCellConfig.py
deleted file mode 100644
index 71939fd..0000000
--- a/src/antlion/controllers/cellular_lib/BaseCellConfig.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class BaseCellConfig:
-    """ Base cell configuration class.
-
-    Attributes:
-      output_power: a float indicating the required signal level at the
-          instrument's output.
-      input_power: a float indicating the required signal level at the
-          instrument's input.
-    """
-    # Configuration dictionary keys
-    PARAM_UL_PW = 'pul'
-    PARAM_DL_PW = 'pdl'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-            parameters to None.
-        Args:
-            log: logger object.
-        """
-        self.log = log
-        self.output_power = None
-        self.input_power = None
-        self.band = None
-
-    def incorporate(self, new_config):
-        """ Incorporates a different configuration by replacing the current
-            values with the new ones for all the parameters different to None.
-        Args:
-            new_config: 5G cell configuration object.
-        """
-        for attr, value in vars(new_config).items():
-            if value and not hasattr(self, attr):
-                setattr(self, attr, value)
diff --git a/src/antlion/controllers/cellular_lib/BaseCellularDut.py b/src/antlion/controllers/cellular_lib/BaseCellularDut.py
deleted file mode 100644
index 2e677a6..0000000
--- a/src/antlion/controllers/cellular_lib/BaseCellularDut.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-
-class PreferredNetworkType(Enum):
-    """ Available preferred network types that can be passed to
-  set_preferred_network_type"""
-    LTE_ONLY = 'lte-only'
-    GSM_ONLY = 'gsm-only'
-    WCDMA_ONLY = 'wcdma-only'
-    NR_LTE = 'nr-lte'
-
-
-class BaseCellularDut():
-    """ Base class for DUTs used with cellular simulators. """
-    def toggle_airplane_mode(self, new_state=True):
-        """ Turns airplane mode on / off.
-
-        Args:
-          new_state: True if airplane mode needs to be enabled.
-        """
-        raise NotImplementedError()
-
-    def toggle_data_roaming(self, new_state=True):
-        """ Enables or disables cellular data roaming.
-
-        Args:
-          new_state: True if data roaming needs to be enabled.
-        """
-        raise NotImplementedError()
-
-    def get_rx_tx_power_levels(self):
-        """ Obtains Rx and Tx power levels measured from the DUT.
-
-        Returns:
-          A tuple where the first element is an array with the RSRP value
-          in each Rx chain, and the second element is the Tx power in dBm.
-          Values for invalid or disabled Rx / Tx chains are set to None.
-        """
-        raise NotImplementedError()
-
-    def set_apn(self, name, apn, type='default'):
-        """ Sets the Access Point Name.
-
-        Args:
-          name: the APN name
-          apn: the APN
-          type: the APN type
-        """
-        raise NotImplementedError()
-
-    def set_preferred_network_type(self, type):
-        """ Sets the preferred RAT.
-
-        Args:
-          type: an instance of class PreferredNetworkType
-        """
-        raise NotImplementedError()
-
-    def get_telephony_signal_strength(self):
-        """ Wrapper for the method with the same name in tel_utils.
-
-        Will be deprecated and replaced by get_rx_tx_power_levels. """
-        raise NotImplementedError()
-
-    def start_modem_logging(self):
-        """ Starts on-device log collection. """
-        raise NotImplementedError()
-
-    def stop_modem_logging(self):
-        """ Stops log collection and pulls logs. """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/cellular_lib/BaseSimulation.py b/src/antlion/controllers/cellular_lib/BaseSimulation.py
deleted file mode 100644
index 043f802..0000000
--- a/src/antlion/controllers/cellular_lib/BaseSimulation.py
+++ /dev/null
@@ -1,741 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from enum import Enum
-
-import numpy as np
-from antlion.controllers import cellular_simulator
-from antlion.controllers.cellular_lib.BaseCellConfig import BaseCellConfig
-
-
-class BaseSimulation(object):
-    """ Base class for cellular connectivity simulations.
-
-    Classes that inherit from this base class implement different simulation
-    setups. The base class contains methods that are common to all simulation
-    configurations.
-
-    """
-
-    NUM_UL_CAL_READS = 3
-    NUM_DL_CAL_READS = 5
-    MAX_BTS_INPUT_POWER = 30
-    MAX_PHONE_OUTPUT_POWER = 23
-    UL_MIN_POWER = -60.0
-
-    # Keys to obtain settings from the test_config dictionary.
-    KEY_CALIBRATION = "calibration"
-    KEY_ATTACH_RETRIES = "attach_retries"
-    KEY_ATTACH_TIMEOUT = "attach_timeout"
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_PATH_FORMAT_STR = 'C:\\Users\\MD8475{}\\Documents\\DAN_configs\\'
-
-    # Time in seconds to wait for the phone to settle
-    # after attaching to the base station.
-    SETTLING_TIME = 10
-
-    # Default time in seconds to wait for the phone to attach to the basestation
-    # after toggling airplane mode. This setting can be changed with the
-    # KEY_ATTACH_TIMEOUT keyword in the test configuration file.
-    DEFAULT_ATTACH_TIMEOUT = 120
-
-    # The default number of attach retries. This setting can be changed with
-    # the KEY_ATTACH_RETRIES keyword in the test configuration file.
-    DEFAULT_ATTACH_RETRIES = 3
-
-    # These two dictionaries allow mapping from a string to a signal level and
-    # have to be overridden by the simulations inheriting from this class.
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {}
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {}
-
-    # Units for downlink signal level. This variable has to be overridden by
-    # the simulations inheriting from this class.
-    DOWNLINK_SIGNAL_LEVEL_UNITS = None
-
-    def __init__(
-        self, simulator, log, dut, test_config, calibration_table,
-        nr_mode=None):
-        """ Initializes the Simulation object.
-
-        Keeps a reference to the callbox, log and dut handlers and
-        initializes the class attributes.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-        """
-
-        self.simulator = simulator
-        self.log = log
-        self.dut = dut
-        self.calibration_table = calibration_table
-        self.nr_mode = nr_mode
-
-        # Turn calibration on or off depending on the test config value. If the
-        # key is not present, set to False by default
-        if self.KEY_CALIBRATION not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to off by default. To '
-                             'turn calibration on, include the key with '
-                             'a true/false value.'.format(
-                                 self.KEY_CALIBRATION))
-
-        self.calibration_required = test_config.get(self.KEY_CALIBRATION,
-                                                    False)
-
-        # Obtain the allowed number of retries from the test configs
-        if self.KEY_ATTACH_RETRIES not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to {} by default.'.format(
-                                 self.KEY_ATTACH_RETRIES,
-                                 self.DEFAULT_ATTACH_RETRIES))
-
-        self.attach_retries = test_config.get(self.KEY_ATTACH_RETRIES,
-                                              self.DEFAULT_ATTACH_RETRIES)
-
-        # Obtain the attach timeout from the test configs
-        if self.KEY_ATTACH_TIMEOUT not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to {} by default.'.format(
-                                 self.KEY_ATTACH_TIMEOUT,
-                                 self.DEFAULT_ATTACH_TIMEOUT))
-
-        self.attach_timeout = test_config.get(self.KEY_ATTACH_TIMEOUT,
-                                              self.DEFAULT_ATTACH_TIMEOUT)
-
-        # Create an empty list for cell configs.
-        self.cell_configs = []
-
-        # Store the current calibrated band
-        self.current_calibrated_band = None
-
-        # Path loss measured during calibration
-        self.dl_path_loss = None
-        self.ul_path_loss = None
-
-        # Target signal levels obtained during configuration
-        self.sim_dl_power = None
-        self.sim_ul_power = None
-
-        # Stores RRC status change timer
-        self.rrc_sc_timer = None
-
-        # Set to default APN
-        log.info("Configuring APN.")
-        self.dut.set_apn('test', 'test')
-
-        # Enable roaming on the phone
-        self.dut.toggle_data_roaming(True)
-
-        # Make sure airplane mode is on so the phone won't attach right away
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for airplane mode setting to propagate
-        time.sleep(2)
-
-        # Prepare the simulator for this simulation setup
-        self.setup_simulator()
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-        raise NotImplementedError()
-
-    def attach(self):
-        """ Attach the phone to the basestation.
-
-        Sets a good signal level, toggles airplane mode
-        and waits for the phone to attach.
-
-        Returns:
-            True if the phone was able to attach, False if not.
-        """
-
-        # Turn on airplane mode
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for airplane mode setting to propagate
-        time.sleep(2)
-
-        # Provide a good signal power for the phone to attach easily
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = -10
-        new_config.output_power = -30
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-        # Try to attach the phone.
-        for i in range(self.attach_retries):
-
-            try:
-
-                # Turn off airplane mode
-                self.dut.toggle_airplane_mode(False)
-
-                # Wait for the phone to attach.
-                self.simulator.wait_until_attached(timeout=self.attach_timeout)
-
-            except cellular_simulator.CellularSimulatorError:
-
-                # The phone failed to attach
-                self.log.info(
-                    "UE failed to attach on attempt number {}.".format(i + 1))
-
-                # Turn airplane mode on to prepare the phone for a retry.
-                self.dut.toggle_airplane_mode(True)
-
-                # Wait for APM to propagate
-                time.sleep(3)
-
-                # Retry
-                if i < self.attach_retries - 1:
-                    # Retry
-                    continue
-                else:
-                    # No more retries left. Return False.
-                    return False
-
-            else:
-                # The phone attached successfully.
-                time.sleep(self.SETTLING_TIME)
-                self.log.info("UE attached to the callbox.")
-                break
-
-        return True
-
-    def detach(self):
-        """ Detach the phone from the basestation.
-
-        Turns airplane mode on and resets the basestation.
-        """
-
-        # Set the DUT to airplane mode so it doesn't see the
-        # cellular network going off
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for APM to propagate
-        time.sleep(2)
-
-        # Power off basestation
-        self.simulator.detach()
-
-    def stop(self):
-        """  Detach phone from the basestation by stopping the simulation.
-
-        Stop the simulation and turn airplane mode on. """
-
-        # Set the DUT to airplane mode so it doesn't see the
-        # cellular network going off
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for APM to propagate
-        time.sleep(2)
-
-        # Stop the simulation
-        self.simulator.stop()
-
-    def start(self):
-        """ Start the simulation by attaching the phone and setting the
-        required DL and UL power.
-
-        Note that this refers to starting the simulated testing environment
-        and not to starting the signaling on the cellular instruments,
-        which might have been done earlier depending on the cellular
-        instrument controller implementation. """
-
-        if not self.attach():
-            raise RuntimeError('Could not attach to base station.')
-
-        # Starts IP traffic while changing this setting to force the UE to be
-        # in Communication state, as UL power cannot be set in Idle state
-        self.start_traffic_for_calibration()
-
-        # Wait until it goes to communication state
-        self.simulator.wait_until_communication_state()
-
-        # Set uplink power to a low value before going to the actual desired
-        # value. This avoids inconsistencies produced by the hysteresis in the
-        # PA switching points.
-        self.log.info('Setting UL power to -5 dBm before going to the '
-                      'requested value to avoid inconsistencies caused by '
-                      'hysteresis.')
-        self.set_uplink_tx_power(-5)
-
-        # Set signal levels obtained from the test parameters
-        self.set_downlink_rx_power(self.sim_dl_power)
-        self.set_uplink_tx_power(self.sim_ul_power)
-
-        # Verify signal level
-        try:
-            rx_power, tx_power = self.dut.get_rx_tx_power_levels()
-
-            if not tx_power or not rx_power[0]:
-                raise RuntimeError('The method returned invalid Tx/Rx values.')
-
-            self.log.info('Signal level reported by the DUT in dBm: Tx = {}, '
-                          'Rx = {}.'.format(tx_power, rx_power))
-
-            if abs(self.sim_ul_power - tx_power) > 1:
-                self.log.warning('Tx power at the UE is off by more than 1 dB')
-
-        except RuntimeError as e:
-            self.log.error('Could not verify Rx / Tx levels: %s.' % e)
-
-        # Stop IP traffic after setting the UL power level
-        self.stop_traffic_for_calibration()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Children classes need to call this method first.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Setup uplink power
-        ul_power = self.get_uplink_power_from_parameters(parameters)
-
-        # Power is not set on the callbox until after the simulation is
-        # started. Saving this value in a variable for later
-        self.sim_ul_power = ul_power
-
-        # Setup downlink power
-
-        dl_power = self.get_downlink_power_from_parameters(parameters)
-
-        # Power is not set on the callbox until after the simulation is
-        # started. Saving this value in a variable for later
-        self.sim_dl_power = dl_power
-
-    def set_uplink_tx_power(self, signal_level):
-        """ Configure the uplink tx power level
-
-        Args:
-            signal_level: calibrated tx power in dBm
-        """
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = self.calibrated_uplink_tx_power(
-            self.cell_configs[0], signal_level)
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-    def set_downlink_rx_power(self, signal_level):
-        """ Configure the downlink rx power level
-
-        Args:
-            signal_level: calibrated rx power in dBm
-        """
-        new_config = BaseCellConfig(self.log)
-        new_config.output_power = self.calibrated_downlink_rx_power(
-            self.cell_configs[0], signal_level)
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-    def get_uplink_power_from_parameters(self, parameters):
-        """ Reads uplink power from the parameter dictionary. """
-
-        if BaseCellConfig.PARAM_UL_PW in parameters:
-            value = parameters[BaseCellConfig.PARAM_UL_PW]
-            if value in self.UPLINK_SIGNAL_LEVEL_DICTIONARY:
-                return self.UPLINK_SIGNAL_LEVEL_DICTIONARY[value]
-            else:
-                try:
-                    if isinstance(value, str) and value.startswith('n'):
-                        # Treat the 'n' character as a negative sign
-                        return -int(value[1:])
-                    else:
-                        return int(value)
-                except ValueError:
-                    pass
-
-        # If the method got to this point it is because PARAM_UL_PW was not
-        # included in the test parameters or the provided value was invalid.
-        raise ValueError(
-            "The config dictionary must include a key {} with the desired "
-            "uplink power expressed by an integer number in dBm or with one of "
-            "the following values: {}. To indicate negative "
-            "values, use the letter n instead of - sign.".format(
-                BaseCellConfig.PARAM_UL_PW,
-                list(self.UPLINK_SIGNAL_LEVEL_DICTIONARY.keys())))
-
-    def get_downlink_power_from_parameters(self, parameters):
-        """ Reads downlink power from a the parameter dictionary. """
-
-        if BaseCellConfig.PARAM_DL_PW in parameters:
-            value = parameters[BaseCellConfig.PARAM_DL_PW]
-            if value not in self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY:
-                raise ValueError(
-                    "Invalid signal level value {}.".format(value))
-            else:
-                return self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY[value]
-        else:
-            # Use default value
-            power = self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY['excellent']
-            self.log.info("No DL signal level value was indicated in the test "
-                          "parameters. Using default value of {} {}.".format(
-                              power, self.DOWNLINK_SIGNAL_LEVEL_UNITS))
-            return power
-
-    def calibrated_downlink_rx_power(self, bts_config, signal_level):
-        """ Calculates the power level at the instrument's output in order to
-        obtain the required rx power level at the DUT's input.
-
-        If calibration values are not available, returns the uncalibrated signal
-        level.
-
-        Args:
-            bts_config: the current configuration at the base station. Derived
-                class implementations can use this object to indicate power as
-                spectral power density or in other units.
-            signal_level: desired downlink received power, can be either a
-                key value pair, an int or a float
-        """
-
-        # Obtain power value if the provided signal_level is a key value pair
-        if isinstance(signal_level, Enum):
-            power = signal_level.value
-        else:
-            power = signal_level
-
-        # Try to use measured path loss value. If this was not set, it will
-        # throw a TypeError exception
-        try:
-            calibrated_power = round(power + self.dl_path_loss)
-            if calibrated_power > self.simulator.MAX_DL_POWER:
-                self.log.warning(
-                    "Cannot achieve phone DL Rx power of {} dBm. Requested TX "
-                    "power of {} dBm exceeds callbox limit!".format(
-                        power, calibrated_power))
-                calibrated_power = self.simulator.MAX_DL_POWER
-                self.log.warning(
-                    "Setting callbox Tx power to max possible ({} dBm)".format(
-                        calibrated_power))
-
-            self.log.info(
-                "Requested phone DL Rx power of {} dBm, setting callbox Tx "
-                "power at {} dBm".format(power, calibrated_power))
-            time.sleep(2)
-            # Power has to be an integer, so calibration won't be exact.
-            # Report the actual received power after rounding.
-            self.log.info(
-                "Phone downlink received power is {0:.2f} dBm".format(
-                    calibrated_power - self.dl_path_loss))
-            return calibrated_power
-        except TypeError:
-            self.log.info("Phone downlink received power set to {} (link is "
-                          "uncalibrated).".format(round(power)))
-            return round(power)
-
-    def calibrated_uplink_tx_power(self, bts_config, signal_level):
-        """ Calculates the power level at the instrument's input in order to
-        obtain the required tx power level at the DUT's output.
-
-        If calibration values are not available, returns the uncalibrated signal
-        level.
-
-        Args:
-            bts_config: the current configuration at the base station. Derived
-                class implementations can use this object to indicate power as
-                spectral power density or in other units.
-            signal_level: desired uplink transmitted power, can be either a
-                key value pair, an int or a float
-        """
-
-        # Obtain power value if the provided signal_level is a key value pair
-        if isinstance(signal_level, Enum):
-            power = signal_level.value
-        else:
-            power = signal_level
-
-        # Try to use measured path loss value. If this was not set, it will
-        # throw a TypeError exception
-        try:
-            calibrated_power = round(power - self.ul_path_loss)
-            if calibrated_power < self.UL_MIN_POWER:
-                self.log.warning(
-                    "Cannot achieve phone UL Tx power of {} dBm. Requested UL "
-                    "power of {} dBm exceeds callbox limit!".format(
-                        power, calibrated_power))
-                calibrated_power = self.UL_MIN_POWER
-                self.log.warning(
-                    "Setting UL Tx power to min possible ({} dBm)".format(
-                        calibrated_power))
-
-            self.log.info(
-                "Requested phone UL Tx power of {} dBm, setting callbox Rx "
-                "power at {} dBm".format(power, calibrated_power))
-            time.sleep(2)
-            # Power has to be an integer, so calibration won't be exact.
-            # Report the actual transmitted power after rounding.
-            self.log.info(
-                "Phone uplink transmitted power is {0:.2f} dBm".format(
-                    calibrated_power + self.ul_path_loss))
-            return calibrated_power
-        except TypeError:
-            self.log.info("Phone uplink transmitted power set to {} (link is "
-                          "uncalibrated).".format(round(power)))
-            return round(power)
-
-    def calibrate(self, band):
-        """ Calculates UL and DL path loss if it wasn't done before.
-
-        The simulator should already be set to the required band before
-        calling this method.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-
-        if self.dl_path_loss and self.ul_path_loss:
-            self.log.info("Measurements are already calibrated.")
-
-        # Attach the phone to the base station
-        if not self.attach():
-            self.log.info(
-                "Skipping calibration because the phone failed to attach.")
-            return
-
-        # If downlink or uplink were not yet calibrated, do it now
-        if not self.dl_path_loss:
-            self.dl_path_loss = self.downlink_calibration()
-        if not self.ul_path_loss:
-            self.ul_path_loss = self.uplink_calibration()
-
-        # Detach after calibrating
-        self.detach()
-        time.sleep(2)
-
-    def start_traffic_for_calibration(self):
-        """
-            Starts UDP IP traffic before running calibration. Uses APN_1
-            configured in the phone.
-        """
-        self.simulator.start_data_traffic()
-
-    def stop_traffic_for_calibration(self):
-        """
-            Stops IP traffic after calibration.
-        """
-        self.simulator.stop_data_traffic()
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """ Computes downlink path loss and returns the calibration value
-
-        The DUT needs to be attached to the base station before calling this
-        method.
-
-        Args:
-            rat: desired RAT to calibrate (matching the label reported by
-                the phone)
-            power_units_conversion_func: a function to convert the units
-                reported by the phone to dBm. Needs to take two arguments:
-                the reported signal level and the cell config. Use None if no
-                conversion is needed.
-        Returns:
-            Downlink calibration value and measured DL power.
-        """
-
-        # Check if this parameter was set. Child classes may need to override
-        # this method, passing the necessary parameters.
-        if not rat:
-            raise ValueError(
-                "The parameter 'rat' has to indicate the RAT being used as "
-                "reported by the phone.")
-
-        # Save initial output level to restore it after calibration
-        restoration_config = BaseCellConfig(self.log)
-        restoration_config.output_power = self.cell_configs[0].output_power
-
-        # Set BTS to a good output level to minimize measurement error
-        new_config = BaseCellConfig(self.log)
-        new_config.output_power = self.simulator.MAX_DL_POWER - 5
-        self.simulator.configure_bts(new_config)
-
-        # Starting IP traffic
-        self.start_traffic_for_calibration()
-
-        down_power_measured = []
-        for i in range(0, self.NUM_DL_CAL_READS):
-            # For some reason, the RSRP gets updated on Screen ON event
-            signal_strength = self.dut.get_telephony_signal_strength()
-            down_power_measured.append(signal_strength[rat])
-            time.sleep(5)
-
-        # Stop IP traffic
-        self.stop_traffic_for_calibration()
-
-        # Reset bts to original settings
-        self.simulator.configure_bts(restoration_config)
-        time.sleep(2)
-
-        # Calculate the mean of the measurements
-        reported_asu_power = np.nanmean(down_power_measured)
-
-        # Convert from RSRP to signal power
-        if power_units_conversion_func:
-            avg_down_power = power_units_conversion_func(
-                reported_asu_power, self.cell_configs[0])
-        else:
-            avg_down_power = reported_asu_power
-
-        # Calculate Path Loss
-        dl_target_power = self.simulator.MAX_DL_POWER - 5
-        down_call_path_loss = dl_target_power - avg_down_power
-
-        # Validate the result
-        if not 0 < down_call_path_loss < 100:
-            raise RuntimeError(
-                "Downlink calibration failed. The calculated path loss value "
-                "was {} dBm.".format(down_call_path_loss))
-
-        self.log.info(
-            "Measured downlink path loss: {} dB".format(down_call_path_loss))
-
-        return down_call_path_loss
-
-    def uplink_calibration(self):
-        """ Computes uplink path loss and returns the calibration value
-
-        The DUT needs to be attached to the base station before calling this
-        method.
-
-        Returns:
-            Uplink calibration value and measured UL power
-        """
-
-        # Save initial input level to restore it after calibration
-        restoration_config = BaseCellConfig(self.log)
-        restoration_config.input_power = self.cell_configs[0].input_power
-
-        # Set BTS1 to maximum input allowed in order to perform
-        # uplink calibration
-        target_power = self.MAX_PHONE_OUTPUT_POWER
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = self.MAX_BTS_INPUT_POWER
-        self.simulator.configure_bts(new_config)
-
-        # Start IP traffic
-        self.start_traffic_for_calibration()
-
-        up_power_per_chain = []
-        # Get the number of chains
-        cmd = 'MONITOR? UL_PUSCH'
-        uplink_meas_power = self.anritsu.send_query(cmd)
-        str_power_chain = uplink_meas_power.split(',')
-        num_chains = len(str_power_chain)
-        for ichain in range(0, num_chains):
-            up_power_per_chain.append([])
-
-        for i in range(0, self.NUM_UL_CAL_READS):
-            uplink_meas_power = self.anritsu.send_query(cmd)
-            str_power_chain = uplink_meas_power.split(',')
-
-            for ichain in range(0, num_chains):
-                if (str_power_chain[ichain] == 'DEACTIVE'):
-                    up_power_per_chain[ichain].append(float('nan'))
-                else:
-                    up_power_per_chain[ichain].append(
-                        float(str_power_chain[ichain]))
-
-            time.sleep(3)
-
-        # Stop IP traffic
-        self.stop_traffic_for_calibration()
-
-        # Reset bts to original settings
-        self.simulator.configure_bts(restoration_config)
-        time.sleep(2)
-
-        # Phone only supports 1x1 Uplink so always chain 0
-        avg_up_power = np.nanmean(up_power_per_chain[0])
-        if np.isnan(avg_up_power):
-            raise RuntimeError(
-                "Calibration failed because the callbox reported the chain to "
-                "be deactive.")
-
-        up_call_path_loss = target_power - avg_up_power
-
-        # Validate the result
-        if not 0 < up_call_path_loss < 100:
-            raise RuntimeError(
-                "Uplink calibration failed. The calculated path loss value "
-                "was {} dBm.".format(up_call_path_loss))
-
-        self.log.info(
-            "Measured uplink path loss: {} dB".format(up_call_path_loss))
-
-        return up_call_path_loss
-
-    def load_pathloss_if_required(self):
-        """ If calibration is required, try to obtain the pathloss values from
-        the calibration table and measure them if they are not available. """
-        # Invalidate the previous values
-        self.dl_path_loss = None
-        self.ul_path_loss = None
-
-        # Load the new ones
-        if self.calibration_required:
-
-            band = self.cell_configs[0].band
-
-            # Try loading the path loss values from the calibration table. If
-            # they are not available, use the automated calibration procedure.
-            try:
-                self.dl_path_loss = self.calibration_table[band]["dl"]
-                self.ul_path_loss = self.calibration_table[band]["ul"]
-            except KeyError:
-                self.calibrate(band)
-
-            # Complete the calibration table with the new values to be used in
-            # the next tests.
-            if band not in self.calibration_table:
-                self.calibration_table[band] = {}
-
-            if "dl" not in self.calibration_table[band] and self.dl_path_loss:
-                self.calibration_table[band]["dl"] = self.dl_path_loss
-
-            if "ul" not in self.calibration_table[band] and self.ul_path_loss:
-                self.calibration_table[band]["ul"] = self.ul_path_loss
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-        simulation state.
-
-        Because throughput is dependent on the RAT, this method needs to be
-        implemented by child classes.
-
-        Returns:
-            Maximum throughput in mbps
-        """
-        raise NotImplementedError()
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-        simulation state.
-
-        Because thoughput is dependent on the RAT, this method needs to be
-        implemented by children classes.
-
-        Returns:
-            Maximum throughput in mbps
-        """
-        raise NotImplementedError()
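
For reference, the calibration arithmetic used by calibrated_downlink_rx_power and calibrated_uplink_tx_power above reduces to adding the measured downlink path loss (or subtracting the uplink path loss) and clamping the result to the callbox limits. A minimal standalone sketch, where MAX_DL_POWER and UL_MIN_POWER are hypothetical placeholder values rather than the constants of any particular simulator:

    # Standalone sketch of the path-loss compensation described above.
    # MAX_DL_POWER and UL_MIN_POWER are illustrative placeholders.
    MAX_DL_POWER = -30  # hypothetical callbox DL output limit, dBm
    UL_MIN_POWER = -60  # hypothetical callbox UL input limit, dBm

    def calibrated_dl_power(requested_rx_dbm: float, dl_path_loss_db: float) -> int:
        """Callbox Tx level needed for the DUT to receive requested_rx_dbm."""
        return min(round(requested_rx_dbm + dl_path_loss_db), MAX_DL_POWER)

    def calibrated_ul_power(requested_tx_dbm: float, ul_path_loss_db: float) -> int:
        """Callbox Rx expectation for the DUT to transmit requested_tx_dbm."""
        return max(round(requested_tx_dbm - ul_path_loss_db), UL_MIN_POWER)

    # With 35 dB of measured DL path loss, asking for -75 dBm at the DUT
    # means driving the callbox output at -40 dBm.
    assert calibrated_dl_power(-75, 35) == -40
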
diff --git a/src/antlion/controllers/cellular_lib/GsmSimulation.py b/src/antlion/controllers/cellular_lib/GsmSimulation.py
deleted file mode 100644
index f0ebf03..0000000
--- a/src/antlion/controllers/cellular_lib/GsmSimulation.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ntpath
-
-import time
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_DCS1800
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_EGSM900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_RGSM900
-from antlion.controllers.anritsu_lib.md8475a import BtsGprsMode
-from antlion.controllers.anritsu_lib.md8475a import BtsNumber
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsusim
-from antlion.controllers.cellular_lib import BaseCellularDut
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib.BaseCellConfig import BaseCellConfig
-
-
-class GsmSimulation(BaseSimulation):
-    """ Single base station GSM. """
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-
-    GSM_BASIC_SIM_FILE = 'SIM_default_GSM.wnssp'
-
-    GSM_CELL_FILE = 'CELL_GSM_config.wnscp'
-
-    # Configuration dictionary keys
-    PARAM_BAND = "band"
-    PARAM_GPRS = "gprs"
-    PARAM_EGPRS = "edge"
-    PARAM_NO_GPRS = "nogprs"
-    PARAM_SLOTS = "slots"
-
-    bands_parameter_mapping = {
-        '850': GSM_BAND_GSM850,
-        '900': GSM_BAND_EGSM900,
-        '1800': GSM_BAND_DCS1800,
-        '1900': GSM_BAND_RGSM900
-    }
-
-    def __init__(self, simulator, log, dut, test_config, calibration_table):
-        """ Initializes the simulator for a single-carrier GSM simulation.
-
-        Loads a simple GSM simulation environment with 1 basestation. It also
-        creates the BTS handle so we can change the parameters as desired.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-        # The GSM simulation relies on the cellular simulator to be a MD8475
-        if not isinstance(simulator, anritsusim.MD8475CellularSimulator):
-            raise ValueError('The GSM simulation relies on the simulator to '
-                             'be an Anritsu MD8475 A/B instrument.')
-
-        # The Anritsu controller needs to be unwrapped before calling
-        # super().__init__ because setup_simulator() requires self.anritsu and
-        # will be called during the parent class initialization.
-        self.anritsu = simulator.anritsu
-        self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
-
-        super().__init__(simulator, log, dut, test_config, calibration_table)
-
-        self.dut.set_preferred_network_type(
-            BaseCellularDut.PreferredNetworkType.GSM_ONLY)
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-
-        # Load callbox config files
-        callbox_config_path = self.CALLBOX_PATH_FORMAT_STR.format(
-            self.anritsu._md8475_version)
-
-        self.anritsu.load_simulation_paramfile(
-            ntpath.join(callbox_config_path, self.GSM_BASIC_SIM_FILE))
-        self.anritsu.load_cell_paramfile(
-            ntpath.join(callbox_config_path, self.GSM_CELL_FILE))
-
-        # Start simulation if it wasn't started
-        self.anritsu.start_simulation()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes GSM configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Don't call super() because Gsm doesn't control Tx power.
-
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include key '{}' with the "
-                "required band number.".format(self.PARAM_BAND))
-
-        self.set_band(self.bts1, parameters[self.PARAM_BAND])
-        self.load_pathloss_if_required()
-
-        # Setup GPRS mode
-
-        if self.PARAM_GPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.GPRS
-        elif self.PARAM_EGPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.EGPRS
-        elif self.PARAM_NO_GPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.NO_GPRS
-        else:
-            raise ValueError(
-                "GPRS mode needs to be indicated in the config dictionary by "
-                "including either {}, {} or {} as a key.".format(
-                    self.PARAM_GPRS, self.PARAM_EGPRS, self.PARAM_NO_GPRS))
-
-        # Setup slot allocation
-        if self.PARAM_SLOTS not in parameters or len(
-                parameters[self.PARAM_SLOTS]) != 2:
-            raise ValueError(
-                "The config dictionary must include key {} with a list of two "
-                "int values indicating DL and UL slots.".format(
-                    self.PARAM_SLOTS))
-        values = parameters[self.PARAM_SLOTS]
-        self.bts1.gsm_slots = (int(values[0]), int(values[1]))
-
-    def set_band(self, bts, band):
-        """ Sets the band used for communication.
-
-        Args:
-            bts: basestation handle
-            band: desired band
-        """
-
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
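
As a reading aid, GsmSimulation.configure above takes the band from the 'band' key, selects the GPRS mode from whichever of the 'gprs', 'edge' or 'nogprs' keys is present, and reads the DL/UL slot allocation from a two-element list under 'slots'. A hypothetical parameter dictionary, not taken from any real test config, could look like this:

    # Hypothetical GSM configuration dictionary; the keys match the class
    # constants above, the values are examples only.
    gsm_parameters = {
        "band": "1800",   # band value forwarded to set_band()
        "edge": None,     # presence of this key selects BtsGprsMode.EGPRS
        "slots": [4, 1],  # DL and UL slot allocation
    }

    # sim = GsmSimulation(simulator, log, dut, test_config, calibration_table)
    # sim.configure(gsm_parameters)
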
diff --git a/src/antlion/controllers/cellular_lib/LteCellConfig.py b/src/antlion/controllers/cellular_lib/LteCellConfig.py
deleted file mode 100644
index 8666f75..0000000
--- a/src/antlion/controllers/cellular_lib/LteCellConfig.py
+++ /dev/null
@@ -1,488 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.controllers.cellular_lib.BaseCellConfig as base_cell
-import antlion.controllers.cellular_lib.LteSimulation as lte_sim
-import math
-
-
-class LteCellConfig(base_cell.BaseCellConfig):
-    """ Extension of the BaseBtsConfig to implement parameters that are
-         exclusive to LTE.
-
-    Attributes:
-        band: an integer indicating the required band number.
-        dlul_config: an integer indicating the TDD config number.
-        ssf_config: an integer indicating the Special Sub-Frame config.
-        bandwidth: a float indicating the required channel bandwidth.
-        mimo_mode: an instance of LteSimulation.MimoMode indicating the
-            required MIMO mode for the downlink signal.
-        transmission_mode: an instance of LteSimulation.TransmissionMode
-            indicating the required TM.
-        scheduling_mode: an instance of LteSimulation.SchedulingMode
-            indicating whether to use Static or Dynamic scheduling.
-        dl_rbs: an integer indicating the number of downlink RBs
-        ul_rbs: an integer indicating the number of uplink RBs
-        dl_mcs: an integer indicating the MCS for the downlink signal
-        ul_mcs: an integer indicating the MCS for the uplink signal
-        dl_256_qam_enabled: a boolean indicating if 256 QAM is enabled
-        ul_64_qam_enabled: a boolean indicating if 64 QAM is enabled
-        mac_padding: a boolean indicating whether RBs should be allocated
-            when there is no user data in static scheduling
-        dl_channel: an integer indicating the downlink channel number
-        cfi: an integer indicating the Control Format Indicator
-        paging_cycle: an integer indicating the paging cycle duration in
-            milliseconds
-        phich: a string indicating the PHICH group size parameter
-        drx_connected_mode: a boolean indicating whether cDRX mode is
-            on or off
-        drx_on_duration_timer: number of PDCCH subframes representing
-            DRX on duration
-        drx_inactivity_timer: number of PDCCH subframes to wait before
-            entering DRX mode
-        drx_retransmission_timer: number of consecutive PDCCH subframes
-            to wait for retransmission
-        drx_long_cycle: number of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        drx_long_cycle_offset: number representing offset in range
-            0 to drx_long_cycle - 1
-    """
-    PARAM_FRAME_CONFIG = "tddconfig"
-    PARAM_BW = "bw"
-    PARAM_SCHEDULING = "scheduling"
-    PARAM_SCHEDULING_STATIC = "static"
-    PARAM_SCHEDULING_DYNAMIC = "dynamic"
-    PARAM_PATTERN = "pattern"
-    PARAM_TM = "tm"
-    PARAM_BAND = "band"
-    PARAM_MIMO = "mimo"
-    PARAM_DL_MCS = 'dlmcs'
-    PARAM_UL_MCS = 'ulmcs'
-    PARAM_SSF = 'ssf'
-    PARAM_CFI = 'cfi'
-    PARAM_PAGING = 'paging'
-    PARAM_PHICH = 'phich'
-    PARAM_DRX = 'drx'
-    PARAM_PADDING = 'mac_padding'
-    PARAM_DL_256_QAM_ENABLED = "256_qam_dl_enabled"
-    PARAM_UL_64_QAM_ENABLED = "64_qam_ul_enabled"
-    PARAM_DL_EARFCN = 'dl_earfcn'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-        parameters to None.
-        Args:
-            log: logger object.
-        """
-        super().__init__(log)
-        self.band = None
-        self.dlul_config = None
-        self.ssf_config = None
-        self.bandwidth = None
-        self.mimo_mode = None
-        self.transmission_mode = None
-        self.scheduling_mode = None
-        self.dl_rbs = None
-        self.ul_rbs = None
-        self.dl_mcs = None
-        self.ul_mcs = None
-        self.dl_256_qam_enabled = None
-        self.ul_64_qam_enabled = None
-        self.mac_padding = None
-        self.dl_channel = None
-        self.cfi = None
-        self.paging_cycle = None
-        self.phich = None
-        self.drx_connected_mode = None
-        self.drx_on_duration_timer = None
-        self.drx_inactivity_timer = None
-        self.drx_retransmission_timer = None
-        self.drx_long_cycle = None
-        self.drx_long_cycle_offset = None
-
-    def __str__(self):
-        return str(vars(self))
-
-    def configure(self, parameters):
-        """ Configures an LTE cell using a dictionary of parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-
-        self.band = parameters[self.PARAM_BAND]
-
-        if self.PARAM_DL_EARFCN not in parameters:
-            band = int(self.band)
-            channel = int(lte_sim.LteSimulation.LOWEST_DL_CN_DICTIONARY[band] +
-                          lte_sim.LteSimulation.LOWEST_DL_CN_DICTIONARY[band +
-                                                                        1]) / 2
-            self.log.warning(
-                "Key '{}' was not set. Using center band channel {} by default."
-                .format(self.PARAM_DL_EARFCN, channel))
-            self.dl_channel = channel
-        else:
-            self.dl_channel = parameters[self.PARAM_DL_EARFCN]
-
-        # Set TDD-only configs
-        if self.get_duplex_mode() == lte_sim.DuplexMode.TDD:
-
-            # Sub-frame DL/UL config
-            if self.PARAM_FRAME_CONFIG not in parameters:
-                raise ValueError("When a TDD band is selected the frame "
-                                 "structure has to be indicated with the '{}' "
-                                 "key with a value from 0 to 6.".format(
-                                     self.PARAM_FRAME_CONFIG))
-
-            self.dlul_config = int(parameters[self.PARAM_FRAME_CONFIG])
-
-            # Special Sub-Frame configuration
-            if self.PARAM_SSF not in parameters:
-                self.log.warning(
-                    'The {} parameter was not provided. Setting '
-                    'Special Sub-Frame config to 6 by default.'.format(
-                        self.PARAM_SSF))
-                self.ssf_config = 6
-            else:
-                self.ssf_config = int(parameters[self.PARAM_SSF])
-
-        # Setup bandwidth
-        if self.PARAM_BW not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter {} with an "
-                "int value (to indicate 1.4 MHz use 14).".format(
-                    self.PARAM_BW))
-
-        bw = float(parameters[self.PARAM_BW])
-
-        if abs(bw - 14) < 0.00000000001:
-            bw = 1.4
-
-        self.bandwidth = bw
-
-        # Setup mimo mode
-        if self.PARAM_MIMO not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter '{}' with the "
-                "mimo mode.".format(self.PARAM_MIMO))
-
-        for mimo_mode in lte_sim.MimoMode:
-            if parameters[self.PARAM_MIMO] == mimo_mode.value:
-                self.mimo_mode = mimo_mode
-                break
-        else:
-            raise ValueError("The value of {} must be one of the following:"
-                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
-
-        # Setup transmission mode
-        if self.PARAM_TM not in parameters:
-            raise ValueError(
-                "The config dictionary must include key {} with an "
-                "int value from 1 to 4 indicating transmission mode.".format(
-                    self.PARAM_TM))
-
-        for tm in lte_sim.TransmissionMode:
-            if parameters[self.PARAM_TM] == tm.value[2:]:
-                self.transmission_mode = tm
-                break
-        else:
-            raise ValueError(
-                "The {} key must have one of the following values:"
-                "1, 2, 3, 4, 7, 8 or 9.".format(self.PARAM_TM))
-
-        # Setup scheduling mode
-        if self.PARAM_SCHEDULING not in parameters:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-            self.log.warning(
-                "The test config does not include the '{}' key. Setting to "
-                "static by default.".format(self.PARAM_SCHEDULING))
-        elif parameters[
-                self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_DYNAMIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.DYNAMIC
-        elif parameters[self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_STATIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-        else:
-            raise ValueError("Key '{}' must have a value of "
-                             "'dynamic' or 'static'.".format(
-                                 self.PARAM_SCHEDULING))
-
-        if self.scheduling_mode == lte_sim.SchedulingMode.STATIC:
-
-            if self.PARAM_PADDING not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set. Enabling MAC padding by "
-                    "default.".format(self.PARAM_PADDING))
-                self.mac_padding = True
-            else:
-                self.mac_padding = parameters[self.PARAM_PADDING]
-
-            if self.PARAM_PATTERN not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set, using 100% RBs for both "
-                    "DL and UL. To set the percentages of total RBs include "
-                    "the '{}' key with a list of two ints indicating downlink "
-                    "and uplink percentages.".format(self.PARAM_PATTERN,
-                                                     self.PARAM_PATTERN))
-                dl_pattern = 100
-                ul_pattern = 100
-            else:
-                dl_pattern = int(parameters[self.PARAM_PATTERN][0])
-                ul_pattern = int(parameters[self.PARAM_PATTERN][1])
-
-            if not (0 <= dl_pattern <= 100 and 0 <= ul_pattern <= 100):
-                raise ValueError(
-                    "The scheduling pattern parameters need to be two "
-                    "positive numbers between 0 and 100.")
-
-            self.dl_rbs, self.ul_rbs = (self.allocation_percentages_to_rbs(
-                dl_pattern, ul_pattern))
-
-            # Check if 256 QAM is enabled for DL MCS
-            if self.PARAM_DL_256_QAM_ENABLED not in parameters:
-                self.log.warning("The key '{}' is not set in the test config. "
-                                 "Setting to false by default.".format(
-                                     self.PARAM_DL_256_QAM_ENABLED))
-
-            self.dl_256_qam_enabled = parameters.get(
-                self.PARAM_DL_256_QAM_ENABLED, False)
-
-            # Look for a DL MCS configuration in the test parameters. If it is
-            # not present, use a default value.
-            if self.PARAM_DL_MCS in parameters:
-                self.dl_mcs = int(parameters[self.PARAM_DL_MCS])
-            else:
-                self.log.warning(
-                    'The test config does not include the {} key. Setting '
-                    'to the max value by default'.format(self.PARAM_DL_MCS))
-                if self.dl_256_qam_enabled and self.bandwidth == 1.4:
-                    self.dl_mcs = 26
-                elif (not self.dl_256_qam_enabled and self.mac_padding
-                      and self.bandwidth != 1.4):
-                    self.dl_mcs = 28
-                else:
-                    self.dl_mcs = 27
-
-            # Check if 64 QAM is enabled for UL MCS
-            if self.PARAM_UL_64_QAM_ENABLED not in parameters:
-                self.log.warning("The key '{}' is not set in the config file. "
-                                 "Setting to false by default.".format(
-                                     self.PARAM_UL_64_QAM_ENABLED))
-
-            self.ul_64_qam_enabled = parameters.get(
-                self.PARAM_UL_64_QAM_ENABLED, False)
-
-            # Look for an UL MCS configuration in the test parameters. If it is
-            # not present, use a default value.
-            if self.PARAM_UL_MCS in parameters:
-                self.ul_mcs = int(parameters[self.PARAM_UL_MCS])
-            else:
-                self.log.warning(
-                    'The test config does not include the {} key. Setting '
-                    'to the max value by default'.format(self.PARAM_UL_MCS))
-                if self.ul_64_qam_enabled:
-                    self.ul_mcs = 28
-                else:
-                    self.ul_mcs = 23
-
-        # Configure the simulation for DRX mode
-        if self.PARAM_DRX in parameters and len(
-                parameters[self.PARAM_DRX]) == 5:
-            self.drx_connected_mode = True
-            self.drx_on_duration_timer = parameters[self.PARAM_DRX][0]
-            self.drx_inactivity_timer = parameters[self.PARAM_DRX][1]
-            self.drx_retransmission_timer = parameters[self.PARAM_DRX][2]
-            self.drx_long_cycle = parameters[self.PARAM_DRX][3]
-            try:
-                long_cycle = int(parameters[self.PARAM_DRX][3])
-                long_cycle_offset = int(parameters[self.PARAM_DRX][4])
-                if long_cycle_offset in range(0, long_cycle):
-                    self.drx_long_cycle_offset = long_cycle_offset
-                else:
-                    self.log.error(
-                        ("The cDRX long cycle offset must be in the "
-                         "range 0 to (long cycle  - 1). Setting "
-                         "long cycle offset to 0"))
-                    self.drx_long_cycle_offset = 0
-
-            except ValueError:
-                self.log.error(("cDRX long cycle and long cycle offset "
-                                "must be integers. Disabling cDRX mode."))
-                self.drx_connected_mode = False
-        else:
-            self.log.warning(
-                ("DRX mode was not configured properly. "
-                 "Please provide a list with the following values: "
-                 "1) DRX on duration timer "
-                 "2) Inactivity timer "
-                 "3) Retransmission timer "
-                 "4) Long DRX cycle duration "
-                 "5) Long DRX cycle offset "
-                 "Example: [2, 6, 16, 20, 0]."))
-
-        # Channel Control Indicator
-        if self.PARAM_CFI not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'CFI to BESTEFFORT.'.format(self.PARAM_CFI))
-            self.cfi = 'BESTEFFORT'
-        else:
-            self.cfi = parameters[self.PARAM_CFI]
-
-        # PHICH group size
-        if self.PARAM_PHICH not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'PHICH group size to 1 by default.'.format(
-                                 self.PARAM_PHICH))
-            self.phich = '1'
-        else:
-            if parameters[self.PARAM_PHICH] == '16':
-                self.phich = '1/6'
-            elif parameters[self.PARAM_PHICH] == '12':
-                self.phich = '1/2'
-            elif parameters[self.PARAM_PHICH] in ['1/6', '1/2', '1', '2']:
-                self.phich = parameters[self.PARAM_PHICH]
-            else:
-                raise ValueError('The {} parameter can only be followed by 1, '
-                                 '2, 1/2 (or 12) or 1/6 (or 16).'.format(
-                                     self.PARAM_PHICH))
-
-        # Paging cycle duration
-        if self.PARAM_PAGING not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'paging cycle duration to 1280 ms by '
-                             'default.'.format(self.PARAM_PAGING))
-            self.paging_cycle = 1280
-        else:
-            try:
-                self.paging_cycle = int(parameters[self.PARAM_PAGING])
-            except ValueError:
-                raise ValueError(
-                    'The {} key has to be followed by the paging cycle '
-                    'duration in milliseconds.'.format(self.PARAM_PAGING))
-
-    def get_duplex_mode(self):
-        """ Determines if the cell uses FDD or TDD duplex mode
-
-        Returns:
-          a DuplexMode value indicating whether the band is FDD or TDD
-        """
-        if 33 <= int(self.band) <= 46:
-            return lte_sim.DuplexMode.TDD
-        else:
-            return lte_sim.DuplexMode.FDD
-
-    def allocation_percentages_to_rbs(self, dl, ul):
-        """ Converts usage percentages to number of DL/UL RBs
-
-        Because not every number of DL/UL RBs is allowed for a certain
-        bandwidth, this function calculates the number of RBs that most
-        closely matches the desired DL/UL percentages.
-
-        Args:
-            dl: desired percentage of downlink RBs
-            ul: desired percentage of uplink RBs
-        Returns:
-            a tuple indicating the number of downlink and uplink RBs
-        """
-
-        # Validate the arguments
-        if (not 0 <= dl <= 100) or (not 0 <= ul <= 100):
-            raise ValueError("The percentage of DL and UL RBs have to be two "
-                             "positive between 0 and 100.")
-
-        # Get min and max values from tables
-        max_rbs = lte_sim.TOTAL_RBS_DICTIONARY[self.bandwidth]
-        min_dl_rbs = lte_sim.MIN_DL_RBS_DICTIONARY[self.bandwidth]
-        min_ul_rbs = lte_sim.MIN_UL_RBS_DICTIONARY[self.bandwidth]
-
-        def percentage_to_amount(min_val, max_val, percentage):
-            """ Returns the integer between min_val and max_val that is closest
-            to percentage/100*max_val
-            """
-
-            # Calculate the value that corresponds to the required percentage.
-            closest_int = round(max_val * percentage / 100)
-            # Cannot be less than min_val
-            closest_int = max(closest_int, min_val)
-            # RBs cannot be more than max_val
-            closest_int = min(closest_int, max_val)
-
-            return closest_int
-
-        # Calculate the number of DL RBs
-
-        # Get the number of DL RBs that corresponds to
-        #  the required percentage.
-        desired_dl_rbs = percentage_to_amount(min_val=min_dl_rbs,
-                                              max_val=max_rbs,
-                                              percentage=dl)
-
-        if self.transmission_mode == lte_sim.TransmissionMode.TM3 or \
-                self.transmission_mode == lte_sim.TransmissionMode.TM4:
-
-            # For TM3 and TM4 the number of DL RBs needs to be max_rbs or a
-            # multiple of the RBG size
-
-            if desired_dl_rbs == max_rbs:
-                dl_rbs = max_rbs
-            else:
-                dl_rbs = (math.ceil(
-                    desired_dl_rbs / lte_sim.RBG_DICTIONARY[self.bandwidth]) *
-                          lte_sim.RBG_DICTIONARY[self.bandwidth])
-
-        else:
-            # The other TMs allow any number of RBs between 1 and max_rbs
-            dl_rbs = desired_dl_rbs
-
-        # Calculate the number of UL RBs
-
-        # Get the number of UL RBs that corresponds
-        # to the required percentage
-        desired_ul_rbs = percentage_to_amount(min_val=min_ul_rbs,
-                                              max_val=max_rbs,
-                                              percentage=ul)
-
-        # Create a list of all possible UL RBs assignment
-        # The standard allows any number that can be written as
-        # 2**a * 3**b * 5**c for any combination of a, b and c.
-
-        def pow_range(max_value, base):
-            """ Returns a range of all possible powers of base under
-              the given max_value.
-          """
-            return range(int(math.ceil(math.log(max_value, base))))
-
-        possible_ul_rbs = [
-            2 ** a * 3 ** b * 5 ** c for a in pow_range(max_rbs, 2)
-            for b in pow_range(max_rbs, 3)
-            for c in pow_range(max_rbs, 5)
-            if 2 ** a * 3 ** b * 5 ** c <= max_rbs]  # yapf: disable
-
-        # Find the value in the list that is closest to desired_ul_rbs
-        differences = [abs(rbs - desired_ul_rbs) for rbs in possible_ul_rbs]
-        ul_rbs = possible_ul_rbs[differences.index(min(differences))]
-
-        # Report what are the obtained RB percentages
-        self.log.info("Requested a {}% / {}% RB allocation. Closest possible "
-                      "percentages are {}% / {}%.".format(
-                          dl, ul, round(100 * dl_rbs / max_rbs),
-                          round(100 * ul_rbs / max_rbs)))
-
-        return dl_rbs, ul_rbs
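
The uplink side of allocation_percentages_to_rbs snaps the requested RB count to the nearest value of the form 2**a * 3**b * 5**c, since, as the comment above notes, the standard only allows UL allocations whose size factors into 2, 3 and 5. A self-contained sketch of that quantization step, using a hypothetical helper name:

    import math

    def closest_valid_ul_rbs(desired_rbs: int, max_rbs: int = 100) -> int:
        """Snap desired_rbs to the nearest allowed UL RB count <= max_rbs."""

        def pow_range(limit, base):
            # Exponents e such that base**e stays below limit.
            return range(int(math.ceil(math.log(limit, base))))

        valid = sorted(
            2 ** a * 3 ** b * 5 ** c
            for a in pow_range(max_rbs, 2)
            for b in pow_range(max_rbs, 3)
            for c in pow_range(max_rbs, 5)
            if 2 ** a * 3 ** b * 5 ** c <= max_rbs)
        return min(valid, key=lambda rbs: abs(rbs - desired_rbs))

    # For a 20 MHz cell (100 RBs), a 70% UL allocation (70 RBs) is rounded
    # to 72, the closest product of powers of 2, 3 and 5.
    assert closest_valid_ul_rbs(70, max_rbs=100) == 72
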
diff --git a/src/antlion/controllers/cellular_lib/LteImsSimulation.py b/src/antlion/controllers/cellular_lib/LteImsSimulation.py
deleted file mode 100644
index 9f74714..0000000
--- a/src/antlion/controllers/cellular_lib/LteImsSimulation.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-
-import antlion.controllers.anritsu_lib.md8475a as md8475a
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-
-# Time to wait for Anritsu's IMS CSCF state change
-MAX_WAIT_TIME_IMS_CSCF_STATE = 30
-# default ims virtual network id for Anritsu ims call test.
-DEFAULT_IMS_VIRTUAL_NETWORK_ID = 1
-
-
-class LteImsSimulation(LteSimulation):
-
-    LTE_BASIC_SIM_FILE = 'VoLTE_ATT_Sim.wnssp'
-    LTE_BASIC_CELL_FILE = 'VoLTE_ATT_Cell.wnscp'
-
-    def attach(self):
-        """ After attaching verify the UE has registered with the IMS server.
-
-        Returns:
-            True if the phone was able to attach, False if not.
-        """
-
-        if not super().attach():
-            return False
-
-        # The phone should have registered with the IMS server before attaching.
-        # Make sure the IMS registration was successful by verifying the CSCF
-        # status is SIP IDLE.
-        if not _wait_for_ims_cscf_status(
-                self.log,
-                self.simulator.anritsu,
-                DEFAULT_IMS_VIRTUAL_NETWORK_ID,
-                md8475a.ImsCscfStatus.SIPIDLE.value):
-            self.log.error('UE failed to register with the IMS server.')
-            return False
-
-        return True
-
-
-def _wait_for_ims_cscf_status(log,
-                              anritsu_handle,
-                              virtual_network_id,
-                              status,
-                              timeout=MAX_WAIT_TIME_IMS_CSCF_STATE):
-    """ Wait for IMS CSCF to be in expected state.
-
-    Args:
-        log: log object
-        anritsu_handle: anritsu object
-        virtual_network_id: virtual network id to be monitored
-        status: expected status
-        timeout: wait time
-    """
-    sleep_interval = 1
-    wait_time = timeout
-    while wait_time > 0:
-        if status == anritsu_handle.get_ims_cscf_status(virtual_network_id):
-            return True
-        time.sleep(sleep_interval)
-        wait_time = wait_time - sleep_interval
-    return False
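
The _wait_for_ims_cscf_status helper above is an instance of a simple poll-until-true loop: query the CSCF state once per second and give up after the timeout. The same pattern in isolation, written as a hypothetical predicate-based helper rather than the module's actual API:

    import time

    def wait_for(predicate, timeout_s: float = 30.0, interval_s: float = 1.0) -> bool:
        """Poll predicate() every interval_s seconds until it returns True
        or timeout_s elapses. Returns True on success, False on timeout."""
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            if predicate():
                return True
            time.sleep(interval_s)
        return False

    # Hypothetical usage mirroring the IMS registration check above:
    # wait_for(lambda: anritsu.get_ims_cscf_status(virtual_network_id) == status,
    #          timeout_s=MAX_WAIT_TIME_IMS_CSCF_STATE)
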
diff --git a/src/antlion/controllers/cellular_lib/LteSimulation.py b/src/antlion/controllers/cellular_lib/LteSimulation.py
deleted file mode 100644
index b811a90..0000000
--- a/src/antlion/controllers/cellular_lib/LteSimulation.py
+++ /dev/null
@@ -1,923 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-from enum import Enum
-
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib.LteCellConfig import LteCellConfig
-from antlion.controllers.cellular_lib.NrCellConfig import NrCellConfig
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class TransmissionMode(Enum):
-    """ Transmission modes for LTE (e.g., TM1, TM4, ...) """
-    TM1 = "TM1"
-    TM2 = "TM2"
-    TM3 = "TM3"
-    TM4 = "TM4"
-    TM7 = "TM7"
-    TM8 = "TM8"
-    TM9 = "TM9"
-
-
-class MimoMode(Enum):
-    """ Mimo modes """
-    MIMO_1x1 = "1x1"
-    MIMO_2x2 = "2x2"
-    MIMO_4x4 = "4x4"
-
-
-class SchedulingMode(Enum):
-    """ Traffic scheduling modes (e.g., STATIC, DYNAMIC) """
-    DYNAMIC = "DYNAMIC"
-    STATIC = "STATIC"
-
-
-class DuplexMode(Enum):
-    """ DL/UL Duplex mode """
-    FDD = "FDD"
-    TDD = "TDD"
-
-
-class ModulationType(Enum):
-    """DL/UL Modulation order."""
-    QPSK = 'QPSK'
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-# Bandwidth [MHz] to RB group size
-RBG_DICTIONARY = {20: 4, 15: 4, 10: 3, 5: 2, 3: 2, 1.4: 1}
-
-# Bandwidth [MHz] to total RBs mapping
-TOTAL_RBS_DICTIONARY = {20: 100, 15: 75, 10: 50, 5: 25, 3: 15, 1.4: 6}
-
-# Bandwidth [MHz] to minimum number of DL RBs that can be assigned to a UE
-MIN_DL_RBS_DICTIONARY = {20: 16, 15: 12, 10: 9, 5: 4, 3: 4, 1.4: 2}
-
-# Bandwidth [MHz] to minimum number of UL RBs that can be assigned to a UE
-MIN_UL_RBS_DICTIONARY = {20: 8, 15: 6, 10: 4, 5: 2, 3: 2, 1.4: 1}
-
-
-class LteSimulation(BaseSimulation):
-    """ Single-carrier LTE simulation. """
-    # Test config keywords
-    KEY_FREQ_BANDS = "freq_bands"
-
-    # Cell param keywords
-    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
-
-    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
-    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSRP"
-
-    # RSRP signal levels thresholds (as reported by Android) in dBm/15KHz.
-    # Excellent is set to -75 since callbox B Tx power is limited to -30 dBm
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'excellent': -75,
-        'high': -110,
-        'medium': -115,
-        'weak': -120,
-        'disconnected': -170
-    }
-
-    # Transmitted output power for the phone (dBm)
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'max': 27,
-        'high': 13,
-        'medium': 3,
-        'low': -20
-    }
-
-    # Allowed bandwidth for each band.
-    allowed_bandwidth_dictionary = {
-        1: [5, 10, 15, 20],
-        2: [1.4, 3, 5, 10, 15, 20],
-        3: [1.4, 3, 5, 10, 15, 20],
-        4: [1.4, 3, 5, 10, 15, 20],
-        5: [1.4, 3, 5, 10],
-        7: [5, 10, 15, 20],
-        8: [1.4, 3, 5, 10],
-        10: [5, 10, 15, 20],
-        11: [5, 10],
-        12: [1.4, 3, 5, 10],
-        13: [5, 10],
-        14: [5, 10],
-        17: [5, 10],
-        18: [5, 10, 15],
-        19: [5, 10, 15],
-        20: [5, 10, 15, 20],
-        21: [5, 10, 15],
-        22: [5, 10, 15, 20],
-        24: [5, 10],
-        25: [1.4, 3, 5, 10, 15, 20],
-        26: [1.4, 3, 5, 10, 15],
-        27: [1.4, 3, 5, 10],
-        28: [3, 5, 10, 15, 20],
-        29: [3, 5, 10],
-        30: [5, 10],
-        31: [1.4, 3, 5],
-        32: [5, 10, 15, 20],
-        33: [5, 10, 15, 20],
-        34: [5, 10, 15],
-        35: [1.4, 3, 5, 10, 15, 20],
-        36: [1.4, 3, 5, 10, 15, 20],
-        37: [5, 10, 15, 20],
-        38: [20],
-        39: [5, 10, 15, 20],
-        40: [5, 10, 15, 20],
-        41: [5, 10, 15, 20],
-        42: [5, 10, 15, 20],
-        43: [5, 10, 15, 20],
-        44: [3, 5, 10, 15, 20],
-        45: [5, 10, 15, 20],
-        46: [10, 20],
-        47: [10, 20],
-        48: [5, 10, 15, 20],
-        49: [10, 20],
-        50: [3, 5, 10, 15, 20],
-        51: [3, 5],
-        52: [5, 10, 15, 20],
-        65: [5, 10, 15, 20],
-        66: [1.4, 3, 5, 10, 15, 20],
-        67: [5, 10, 15, 20],
-        68: [5, 10, 15],
-        69: [5],
-        70: [5, 10, 15],
-        71: [5, 10, 15, 20],
-        72: [1.4, 3, 5],
-        73: [1.4, 3, 5],
-        74: [1.4, 3, 5, 10, 15, 20],
-        75: [5, 10, 15, 20],
-        76: [5],
-        85: [5, 10],
-        252: [20],
-        255: [20]
-    }
-
-    # Dictionary of lower DL channel number bound for each band.
-    LOWEST_DL_CN_DICTIONARY = {
-        1: 0,
-        2: 600,
-        3: 1200,
-        4: 1950,
-        5: 2400,
-        6: 2650,
-        7: 2750,
-        8: 3450,
-        9: 3800,
-        10: 4150,
-        11: 4750,
-        12: 5010,
-        13: 5180,
-        14: 5280,
-        17: 5730,
-        18: 5850,
-        19: 6000,
-        20: 6150,
-        21: 6450,
-        22: 6600,
-        23: 7500,
-        24: 7700,
-        25: 8040,
-        26: 8690,
-        27: 9040,
-        28: 9210,
-        29: 9660,
-        30: 9770,
-        31: 9870,
-        32: 9920,
-        33: 36000,
-        34: 36200,
-        35: 36350,
-        36: 36950,
-        37: 37550,
-        38: 37750,
-        39: 38250,
-        40: 38650,
-        41: 39650,
-        42: 41590,
-        43: 45590,
-        66: 66436,
-        67: 67336
-    }
-
-    # Peak throughput lookup tables for each TDD subframe
-    # configuration and bandwidth
-    # yapf: disable
-    tdd_config4_tput_lut = {
-        0: {
-            5: {'DL': 3.82, 'UL': 2.63},
-            10: {'DL': 11.31, 'UL': 9.03},
-            15: {'DL': 16.9, 'UL': 20.62},
-            20: {'DL': 22.88, 'UL': 28.43}
-        },
-        1: {
-            5: {'DL': 6.13, 'UL': 4.08},
-            10: {'DL': 18.36, 'UL': 9.69},
-            15: {'DL': 28.62, 'UL': 14.21},
-            20: {'DL': 39.04, 'UL': 19.23}
-        },
-        2: {
-            5: {'DL': 5.68, 'UL': 2.30},
-            10: {'DL': 25.51, 'UL': 4.68},
-            15: {'DL': 39.3, 'UL': 7.13},
-            20: {'DL': 53.64, 'UL': 9.72}
-        },
-        3: {
-            5: {'DL': 8.26, 'UL': 3.45},
-            10: {'DL': 23.20, 'UL': 6.99},
-            15: {'DL': 35.35, 'UL': 10.75},
-            20: {'DL': 48.3, 'UL': 14.6}
-        },
-        4: {
-            5: {'DL': 6.16, 'UL': 2.30},
-            10: {'DL': 26.77, 'UL': 4.68},
-            15: {'DL': 40.7, 'UL': 7.18},
-            20: {'DL': 55.6, 'UL': 9.73}
-        },
-        5: {
-            5: {'DL': 6.91, 'UL': 1.12},
-            10: {'DL': 30.33, 'UL': 2.33},
-            15: {'DL': 46.04, 'UL': 3.54},
-            20: {'DL': 62.9, 'UL': 4.83}
-        },
-        6: {
-            5: {'DL': 6.13, 'UL': 4.13},
-            10: {'DL': 14.79, 'UL': 11.98},
-            15: {'DL': 23.28, 'UL': 17.46},
-            20: {'DL': 31.75, 'UL': 23.95}
-        }
-    }
-
-    tdd_config3_tput_lut = {
-        0: {
-            5: {'DL': 5.04, 'UL': 3.7},
-            10: {'DL': 15.11, 'UL': 17.56},
-            15: {'DL': 22.59, 'UL': 30.31},
-            20: {'DL': 30.41, 'UL': 41.61}
-        },
-        1: {
-            5: {'DL': 8.07, 'UL': 5.66},
-            10: {'DL': 24.58, 'UL': 13.66},
-            15: {'DL': 39.05, 'UL': 20.68},
-            20: {'DL': 51.59, 'UL': 28.76}
-        },
-        2: {
-            5: {'DL': 7.59, 'UL': 3.31},
-            10: {'DL': 34.08, 'UL': 6.93},
-            15: {'DL': 53.64, 'UL': 10.51},
-            20: {'DL': 70.55, 'UL': 14.41}
-        },
-        3: {
-            5: {'DL': 10.9, 'UL': 5.0},
-            10: {'DL': 30.99, 'UL': 10.25},
-            15: {'DL': 48.3, 'UL': 15.81},
-            20: {'DL': 63.24, 'UL': 21.65}
-        },
-        4: {
-            5: {'DL': 8.11, 'UL': 3.32},
-            10: {'DL': 35.74, 'UL': 6.95},
-            15: {'DL': 55.6, 'UL': 10.51},
-            20: {'DL': 72.72, 'UL': 14.41}
-        },
-        5: {
-            5: {'DL': 9.28, 'UL': 1.57},
-            10: {'DL': 40.49, 'UL': 3.44},
-            15: {'DL': 62.9, 'UL': 5.23},
-            20: {'DL': 82.21, 'UL': 7.15}
-        },
-        6: {
-            5: {'DL': 8.06, 'UL': 5.74},
-            10: {'DL': 19.82, 'UL': 17.51},
-            15: {'DL': 31.75, 'UL': 25.77},
-            20: {'DL': 42.12, 'UL': 34.91}
-        }
-    }
-
-    tdd_config2_tput_lut = {
-        0: {
-            5: {'DL': 3.11, 'UL': 2.55},
-            10: {'DL': 9.93, 'UL': 11.1},
-            15: {'DL': 13.9, 'UL': 21.51},
-            20: {'DL': 20.02, 'UL': 41.66}
-        },
-        1: {
-            5: {'DL': 5.33, 'UL': 4.27},
-            10: {'DL': 15.14, 'UL': 13.95},
-            15: {'DL': 33.84, 'UL': 19.73},
-            20: {'DL': 44.61, 'UL': 27.35}
-        },
-        2: {
-            5: {'DL': 6.87, 'UL': 3.32},
-            10: {'DL': 17.06, 'UL': 6.76},
-            15: {'DL': 49.63, 'UL': 10.5},
-            20: {'DL': 65.2, 'UL': 14.41}
-        },
-        3: {
-            5: {'DL': 5.41, 'UL': 4.17},
-            10: {'DL': 16.89, 'UL': 9.73},
-            15: {'DL': 44.29, 'UL': 15.7},
-            20: {'DL': 53.95, 'UL': 19.85}
-        },
-        4: {
-            5: {'DL': 8.7, 'UL': 3.32},
-            10: {'DL': 17.58, 'UL': 6.76},
-            15: {'DL': 51.08, 'UL': 10.47},
-            20: {'DL': 66.45, 'UL': 14.38}
-        },
-        5: {
-            5: {'DL': 9.46, 'UL': 1.55},
-            10: {'DL': 19.02, 'UL': 3.48},
-            15: {'DL': 58.89, 'UL': 5.23},
-            20: {'DL': 76.85, 'UL': 7.1}
-        },
-        6: {
-            5: {'DL': 4.74, 'UL': 3.9},
-            10: {'DL': 12.32, 'UL': 13.37},
-            15: {'DL': 27.74, 'UL': 25.02},
-            20: {'DL': 35.48, 'UL': 32.95}
-        }
-    }
-
-    tdd_config1_tput_lut = {
-        0: {
-            5: {'DL': 4.25, 'UL': 3.35},
-            10: {'DL': 8.38, 'UL': 7.22},
-            15: {'DL': 12.41, 'UL': 13.91},
-            20: {'DL': 16.27, 'UL': 24.09}
-        },
-        1: {
-            5: {'DL': 7.28, 'UL': 4.61},
-            10: {'DL': 14.73, 'UL': 9.69},
-            15: {'DL': 21.91, 'UL': 13.86},
-            20: {'DL': 27.63, 'UL': 17.18}
-        },
-        2: {
-            5: {'DL': 10.37, 'UL': 2.27},
-            10: {'DL': 20.92, 'UL': 4.66},
-            15: {'DL': 31.01, 'UL': 7.04},
-            20: {'DL': 42.03, 'UL': 9.75}
-        },
-        3: {
-            5: {'DL': 9.25, 'UL': 3.44},
-            10: {'DL': 18.38, 'UL': 6.95},
-            15: {'DL': 27.59, 'UL': 10.62},
-            20: {'DL': 34.85, 'UL': 13.45}
-        },
-        4: {
-            5: {'DL': 10.71, 'UL': 2.26},
-            10: {'DL': 21.54, 'UL': 4.67},
-            15: {'DL': 31.91, 'UL': 7.2},
-            20: {'DL': 43.35, 'UL': 9.74}
-        },
-        5: {
-            5: {'DL': 12.34, 'UL': 1.08},
-            10: {'DL': 24.78, 'UL': 2.34},
-            15: {'DL': 36.68, 'UL': 3.57},
-            20: {'DL': 49.84, 'UL': 4.81}
-        },
-        6: {
-            5: {'DL': 5.76, 'UL': 4.41},
-            10: {'DL': 11.68, 'UL': 9.7},
-            15: {'DL': 17.34, 'UL': 17.95},
-            20: {'DL': 23.5, 'UL': 23.42}
-        }
-    }
-    # yapf: enable
-
-    # Peak throughput lookup table dictionary
-    tdd_config_tput_lut_dict = {
-        'TDD_CONFIG1':
-        tdd_config1_tput_lut,  # DL 256QAM, UL 64QAM & MAC padding turned OFF
-        'TDD_CONFIG2':
-        tdd_config2_tput_lut,  # DL 256QAM, UL 64 QAM ON & MAC padding OFF
-        'TDD_CONFIG3':
-        tdd_config3_tput_lut,  # DL 256QAM, UL 64QAM & MAC padding ON
-        'TDD_CONFIG4':
-        tdd_config4_tput_lut  # DL 256QAM, UL 64 QAM OFF & MAC padding ON
-    }
-
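A minimal illustrative lookup into these tables (values taken from the TDD_CONFIG3 table above; the keys are the TDD configuration name, then the DL/UL subframe configuration 0-6, then the bandwidth in MHz, then the link direction):

```python
# Illustrative only: reading a per-stream peak rate from the tables above.
# Keys: config name -> TDD DL/UL subframe configuration -> bandwidth (MHz)
# -> 'DL' or 'UL'.
peak_dl_mbps = tdd_config_tput_lut_dict['TDD_CONFIG3'][1][20]['DL']  # 51.59
peak_ul_mbps = tdd_config_tput_lut_dict['TDD_CONFIG3'][1][20]['UL']  # 28.76
```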
-    def __init__(
-        self, simulator, log, dut, test_config, calibration_table,
-        nr_mode=None):
-        """ Initializes the simulator for a single-carrier LTE simulation.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-
-        super().__init__(
-            simulator, log, dut, test_config, calibration_table, nr_mode)
-
-        self.num_carriers = None
-
-        # Force device to LTE only so that it connects faster
-        try:
-            if self.nr_mode and 'nr' == self.nr_mode:
-                self.dut.set_preferred_network_type(
-                    BaseCellularDut.PreferredNetworkType.LTE_NR)
-            else:
-                self.dut.set_preferred_network_type(
-                    BaseCellularDut.PreferredNetworkType.LTE_ONLY)
-        except Exception as e:
-            # If this fails the test should be able to run anyways, even if it
-            # takes longer to find the cell.
-            self.log.warning('Setting preferred RAT failed: ' + str(e))
-
-        # Get LTE CA frequency bands setting from the test configuration
-        if self.KEY_FREQ_BANDS not in test_config:
-            self.log.warning("The key '{}' is not set in the config file. "
-                             "Setting to null by default.".format(
-                                 self.KEY_FREQ_BANDS))
-
-        self.freq_bands = test_config.get(self.KEY_FREQ_BANDS, True)
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-        if self.nr_mode and 'nr' == self.nr_mode:
-            self.log.info('Initializing the callbox to the NR NSA scenario')
-            self.simulator.setup_nr_nsa_scenario()
-        else:
-            self.log.info('Initializing the callbox to the LTE scenario')
-            self.simulator.setup_lte_scenario()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes LTE configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary if there is only one carrier,
-                a list if there are multiple cells.
-        """
-        # If there is a single item, put it in a list
-        if not isinstance(parameters, list):
-            parameters = [parameters]
-
-        # Pass only PCC configs to BaseSimulation
-        super().configure(parameters[0])
-
-        new_cell_list = []
-        for cell in parameters:
-            if LteCellConfig.PARAM_BAND not in cell:
-                raise ValueError(
-                    "The configuration dictionary must include a key '{}' with "
-                    "the required band number.".format(
-                        LteCellConfig.PARAM_BAND))
-
-            band = cell[LteCellConfig.PARAM_BAND]
-
-            if isinstance(band, str) and not band.isdigit():
-                # If band starts with n then it is an NR band
-                if band[0] == 'n' and band[1:].isdigit():
-                    # If the remaining string is only the band number, add
-                    # the cell and continue
-                    new_cell_list.append(cell)
-                    continue
-
-                ca_class = band[-1].upper()
-                band_num = band[:-1]
-
-                if ca_class in ['A', 'C']:
-                    # Remove the CA class label and add the cell
-                    cell[LteCellConfig.PARAM_BAND] = band_num
-                    new_cell_list.append(cell)
-                elif ca_class == 'B':
-                    raise RuntimeError('Class B LTE CA not supported.')
-                else:
-                    raise ValueError('Invalid band value: ' + band)
-
-                # Class C means that there are two contiguous carriers
-                if ca_class == 'C':
-                    new_cell_list.append(dict(cell))
-                    bw = int(cell[LteCellConfig.PARAM_BW])
-                    dl_earfcn = LteCellConfig.PARAM_DL_EARFCN
-                    new_cell_list[-1][dl_earfcn] = self.LOWEST_DL_CN_DICTIONARY[
-                        int(band_num)] + bw * 10 - 2
-            else:
-                # The band is just a number, so just add it to the list
-                new_cell_list.append(cell)
-
-        # Log new_cell_list for debugging
-        self.log.info('new cell list: {}'.format(new_cell_list))
-
-        self.simulator.set_band_combination(
-            [c[LteCellConfig.PARAM_BAND] for c in new_cell_list])
-
-        self.num_carriers = len(new_cell_list)
-
-        # Set up the base stations with the obtained configuration
-        self.cell_configs = []
-        for i in range(self.num_carriers):
-            band = new_cell_list[i][LteCellConfig.PARAM_BAND]
-            if isinstance(band, str) and band[0] == 'n':
-                self.cell_configs.append(NrCellConfig(self.log))
-            else:
-                self.cell_configs.append(LteCellConfig(self.log))
-            self.cell_configs[i].configure(new_cell_list[i])
-            self.simulator.configure_bts(self.cell_configs[i], i)
-
-        # Now that the band is set, calibrate the link if necessary
-        self.load_pathloss_if_required()
-
-        # This shouldn't be a cell parameter but instead a simulation config
-        # Setup LTE RRC status change function and timer for LTE idle test case
-        if self.PARAM_RRC_STATUS_CHANGE_TIMER not in parameters[0]:
-            self.log.info(
-                "The test config does not include the '{}' key. Disabled "
-                "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
-            self.simulator.set_lte_rrc_state_change_timer(False)
-        else:
-            timer = int(parameters[0][self.PARAM_RRC_STATUS_CHANGE_TIMER])
-            self.simulator.set_lte_rrc_state_change_timer(True, timer)
-            self.rrc_sc_timer = timer
-
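As a hedged sketch (the instance name and the exact LteCellConfig key strings are assumptions based on the PARAM_* constants used elsewhere in this library), configure() takes a single dictionary for one carrier or a list for carrier aggregation, and a band string ending in 'C' expands into two contiguous carriers:

```python
# Hypothetical parameter sets for configure(); 'lte_simulation' is an
# already-created instance of this class.
single_cell = {'band': '3', 'bw': 20, 'mimo': '2x2'}

# '7C' requests two contiguous class-C carriers; the second DL EARFCN is
# derived from LOWEST_DL_CN_DICTIONARY[7] + bw * 10 - 2.
contiguous_ca = [{'band': '7C', 'bw': 20, 'mimo': '2x2'}]

lte_simulation.configure(single_cell)    # one LTE cell
lte_simulation.configure(contiguous_ca)  # two contiguous LTE cells
```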
-    def calibrated_downlink_rx_power(self, bts_config, rsrp):
-        """ LTE simulation overrides this method so that it can convert from
-        RSRP to total signal power transmitted from the basestation.
-
-        Args:
-            bts_config: the current configuration at the base station
-            rsrp: desired rsrp, contained in a key value pair
-        """
-
-        power = self.rsrp_to_signal_power(rsrp, bts_config)
-
-        self.log.info(
-            "Setting downlink signal level to {} RSRP ({} dBm)".format(
-                rsrp, power))
-
-        # Use parent method to calculate signal level
-        return super().calibrated_downlink_rx_power(bts_config, power)
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """ Computes downlink path loss and returns the calibration value.
-
-        See base class implementation for details.
-
-        Args:
-            rat: ignored, replaced by 'lteRsrp'
-            power_units_conversion_func: ignored, replaced by
-                self.rsrp_to_signal_power
-
-        Returns:
-            Downlink calibration value and measured DL power. Note that the
-            phone only reports RSRP of the primary chain
-        """
-
-        return super().downlink_calibration(
-            rat='lteDbm',
-            power_units_conversion_func=self.rsrp_to_signal_power)
-
-    def rsrp_to_signal_power(self, rsrp, bts_config):
-        """ Converts rsrp to total band signal power
-
-        RSRP is measured per subcarrier, so total band power needs to be
-        multiplied by the number of subcarriers being used.
-
-        Args:
-            rsrp: desired rsrp in dBm
-            bts_config: a base station configuration object
-        Returns:
-            Total band signal power in dBm
-        """
-
-        bandwidth = bts_config.bandwidth
-
-        if bandwidth == 100: # This assumes 273 RBs. TODO: b/229163022
-            power = rsrp + 35.15
-        elif bandwidth == 20:  # 100 RBs
-            power = rsrp + 30.79
-        elif bandwidth == 15:  # 75 RBs
-            power = rsrp + 29.54
-        elif bandwidth == 10:  # 50 RBs
-            power = rsrp + 27.78
-        elif bandwidth == 5:  # 25 RBs
-            power = rsrp + 24.77
-        elif bandwidth == 3:  # 15 RBs
-            power = rsrp + 22.55
-        elif bandwidth == 1.4:  # 6 RBs
-            power = rsrp + 18.57
-        else:
-            raise ValueError("Invalid bandwidth value.")
-
-        return power
-
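The hard-coded offsets above are consistent with summing the per-subcarrier RSRP over every subcarrier in the band, i.e. roughly 10·log10(12 · number of RBs); a small sketch that reproduces them:

```python
import math

def rsrp_offset_db(num_rbs: int) -> float:
    """Offset from per-subcarrier RSRP to total band power in dB."""
    return 10 * math.log10(12 * num_rbs)  # 12 subcarriers per resource block

print(round(rsrp_offset_db(100), 2))  # 30.79 -> 20 MHz
print(round(rsrp_offset_db(50), 2))   # 27.78 -> 10 MHz
print(round(rsrp_offset_db(6), 2))    # 18.57 -> 1.4 MHz
```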
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        return sum(
-            self.bts_maximum_downlink_throughtput(self.cell_configs[bts_index])
-            for bts_index in range(self.num_carriers))
-
-    def bts_maximum_downlink_throughtput(self, bts_config):
-        """ Calculates maximum achievable downlink throughput for a single
-        base station from its configuration object.
-
-        Args:
-            bts_config: a base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        if bts_config.mimo_mode == MimoMode.MIMO_1x1:
-            streams = 1
-        elif bts_config.mimo_mode == MimoMode.MIMO_2x2:
-            streams = 2
-        elif bts_config.mimo_mode == MimoMode.MIMO_4x4:
-            streams = 4
-        else:
-            raise ValueError('Unable to calculate maximum downlink throughput '
-                             'because the MIMO mode has not been set.')
-
-        bandwidth = bts_config.bandwidth
-        rb_ratio = bts_config.dl_rbs / TOTAL_RBS_DICTIONARY[bandwidth]
-        mcs = bts_config.dl_mcs
-
-        max_rate_per_stream = None
-
-        tdd_subframe_config = bts_config.dlul_config
-        duplex_mode = bts_config.get_duplex_mode()
-
-        if duplex_mode == DuplexMode.TDD:
-            if bts_config.dl_256_qam_enabled:
-                if mcs == 27:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
-                                'DL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
-                                'DL']
-            else:
-                if mcs == 28:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
-                                'DL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
-                                'DL']
-
-        elif duplex_mode == DuplexMode.FDD:
-            if (not bts_config.dl_256_qam_enabled and bts_config.mac_padding
-                    and mcs == 28):
-                max_rate_per_stream = {
-                    3: 9.96,
-                    5: 17.0,
-                    10: 34.7,
-                    15: 52.7,
-                    20: 72.2
-                }.get(bandwidth, None)
-            if (not bts_config.dl_256_qam_enabled and bts_config.mac_padding
-                    and mcs == 27):
-                max_rate_per_stream = {
-                    1.4: 2.94,
-                }.get(bandwidth, None)
-            elif (not bts_config.dl_256_qam_enabled
-                  and not bts_config.mac_padding and mcs == 27):
-                max_rate_per_stream = {
-                    1.4: 2.87,
-                    3: 7.7,
-                    5: 14.4,
-                    10: 28.7,
-                    15: 42.3,
-                    20: 57.7
-                }.get(bandwidth, None)
-            elif bts_config.dl_256_qam_enabled and bts_config.mac_padding and mcs == 27:
-                max_rate_per_stream = {
-                    3: 13.2,
-                    5: 22.9,
-                    10: 46.3,
-                    15: 72.2,
-                    20: 93.9
-                }.get(bandwidth, None)
-            elif bts_config.dl_256_qam_enabled and bts_config.mac_padding and mcs == 26:
-                max_rate_per_stream = {
-                    1.4: 3.96,
-                }.get(bandwidth, None)
-            elif (bts_config.dl_256_qam_enabled and not bts_config.mac_padding
-                  and mcs == 27):
-                max_rate_per_stream = {
-                    3: 11.3,
-                    5: 19.8,
-                    10: 44.1,
-                    15: 68.1,
-                    20: 88.4
-                }.get(bandwidth, None)
-            elif (bts_config.dl_256_qam_enabled and not bts_config.mac_padding
-                  and mcs == 26):
-                max_rate_per_stream = {
-                    1.4: 3.96,
-                }.get(bandwidth, None)
-
-        if not max_rate_per_stream:
-            raise NotImplementedError(
-                "The calculation for MAC padding = {} "
-                "and mcs = {} is not implemented.".format(
-                    "FULLALLOCATION" if bts_config.mac_padding else "OFF",
-                    mcs))
-
-        return max_rate_per_stream * streams * rb_ratio
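A hedged worked example of the formula above, assuming a hypothetical FDD cell at 20 MHz (100 RBs), 2x2 MIMO, MCS 28, MAC padding on and DL 256QAM off:

```python
# Illustrative numbers only; the per-stream rate comes from the FDD branch
# above for mcs == 28 with MAC padding enabled and 256QAM disabled.
max_rate_per_stream = 72.2       # Mbps at 20 MHz
streams = 2                      # MimoMode.MIMO_2x2
rb_ratio = 100 / 100             # all downlink RBs allocated

max_dl_throughput = max_rate_per_stream * streams * rb_ratio  # 144.4 Mbps
```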
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable uplink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        return self.bts_maximum_uplink_throughtput(self.cell_configs[0])
-
-    def bts_maximum_uplink_throughtput(self, bts_config):
-        """ Calculates maximum achievable uplink throughput for the selected
-        basestation from its configuration object.
-
-        Args:
-            bts_config: an LTE base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        bandwidth = bts_config.bandwidth
-        rb_ratio = bts_config.ul_rbs / TOTAL_RBS_DICTIONARY[bandwidth]
-        mcs = bts_config.ul_mcs
-
-        max_rate_per_stream = None
-
-        tdd_subframe_config = bts_config.dlul_config
-        duplex_mode = bts_config.get_duplex_mode()
-
-        if duplex_mode == DuplexMode.TDD:
-            if bts_config.ul_64_qam_enabled:
-                if mcs == 28:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
-                                'UL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
-                                'UL']
-            else:
-                if mcs == 23:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
-                                'UL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
-                                'UL']
-
-        elif duplex_mode == DuplexMode.FDD:
-            if mcs == 23 and not bts_config.ul_64_qam_enabled:
-                max_rate_per_stream = {
-                    1.4: 2.85,
-                    3: 7.18,
-                    5: 12.1,
-                    10: 24.5,
-                    15: 36.5,
-                    20: 49.1
-                }.get(bandwidth, None)
-            elif mcs == 28 and bts_config.ul_64_qam_enabled:
-                max_rate_per_stream = {
-                    1.4: 4.2,
-                    3: 10.5,
-                    5: 17.2,
-                    10: 35.3,
-                    15: 53.0,
-                    20: 72.6
-                }.get(bandwidth, None)
-
-        if not max_rate_per_stream:
-            raise NotImplementedError(
-                "The calculation fir mcs = {} is not implemented.".format(
-                    "FULLALLOCATION" if bts_config.mac_padding else "OFF",
-                    mcs))
-
-        return max_rate_per_stream * rb_ratio
-
-    def calibrate(self, band):
-        """ Calculates UL and DL path loss if it wasn't done before
-
-        Before running the base class implementation, configure the base station
-        to only use one downlink antenna with maximum bandwidth.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-
-        # Save initial values in a configuration object so they can be restored
-        restore_config = LteCellConfig(self.log)
-        restore_config.mimo_mode = self.cell_configs[0].mimo_mode
-        restore_config.transmission_mode = \
-            self.cell_configs[0].transmission_mode
-        restore_config.bandwidth = self.cell_configs[0].bandwidth
-
-        # Set up a temporary calibration configuration.
-        temporary_config = LteCellConfig(self.log)
-        temporary_config.mimo_mode = MimoMode.MIMO_1x1
-        temporary_config.transmission_mode = TransmissionMode.TM1
-        temporary_config.bandwidth = max(
-            self.allowed_bandwidth_dictionary[int(band)])
-        self.simulator.configure_bts(temporary_config)
-        self.cell_configs[0].incorporate(temporary_config)
-
-        super().calibrate(band)
-
-        # Restore values as they were before changing them for calibration.
-        self.simulator.configure_bts(restore_config)
-        self.cell_configs[0].incorporate(restore_config)
-
-    def start_traffic_for_calibration(self):
-        """ If MAC padding is enabled, there is no need to start IP traffic. """
-        if not self.cell_configs[0].mac_padding:
-            super().start_traffic_for_calibration()
-
-    def stop_traffic_for_calibration(self):
-        """ If MAC padding is enabled, IP traffic wasn't started. """
-        if not self.cell_configs[0].mac_padding:
-            super().stop_traffic_for_calibration()
-
-    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
-        """ Calculates UL power using measurements from the callbox and the
-        calibration data.
-
-        Args:
-            samples: the number of samples to average
-            wait_after_sample: time in seconds to wait in between samples
-
-        Returns:
-            the UL power at the UE antenna ports in dBm
-        """
-        ul_power_sum = 0
-        samples_left = samples
-
-        while samples_left > 0:
-            ul_power_sum += self.simulator.get_measured_pusch_power()
-            samples_left -= 1
-            time.sleep(wait_after_sample)
-
-        # Got enough samples, return calibrated average
-        if self.ul_path_loss:
-            return ul_power_sum / samples + self.ul_path_loss
-        else:
-            self.log.warning('No uplink calibration data. Returning '
-                             'uncalibrated values as measured by the '
-                             'callbox.')
-            return ul_power_sum / samples
-
-    def start(self):
-        """ Set the signal level for the secondary carriers, as the base class
-        implementation of this method will only set up downlink power for the
-        primary carrier component.
-
-        After that, attaches the secondary carriers."""
-
-        super().start()
-
-        if self.num_carriers > 1:
-            if self.sim_dl_power:
-                self.log.info('Setting DL power for secondary carriers.')
-
-                for bts_index in range(1, self.num_carriers):
-                    new_config = LteCellConfig(self.log)
-                    new_config.output_power = self.calibrated_downlink_rx_power(
-                        self.cell_configs[bts_index], self.sim_dl_power)
-                    self.simulator.configure_bts(new_config, bts_index)
-                    self.cell_configs[bts_index].incorporate(new_config)
-
-            self.simulator.lte_attach_secondary_carriers(self.freq_bands)
diff --git a/src/antlion/controllers/cellular_lib/NrCellConfig.py b/src/antlion/controllers/cellular_lib/NrCellConfig.py
deleted file mode 100644
index ff72dae..0000000
--- a/src/antlion/controllers/cellular_lib/NrCellConfig.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.controllers.cellular_lib.BaseCellConfig as base_cell
-import antlion.controllers.cellular_lib.LteSimulation as lte_sim
-
-
-class NrCellConfig(base_cell.BaseCellConfig):
-    """ NR cell configuration class.
-
-    Attributes:
-        band: an integer indicating the required band number.
-        bandwidth: an integer indicating the required channel bandwidth
-    """
-
-    PARAM_BAND = 'band'
-    PARAM_BW = 'bw'
-    PARAM_DL_MCS = 'dlmcs'
-    PARAM_DL_RBS = 'dl_rbs'
-    PARAM_PADDING = 'mac_padding'
-    PARAM_MIMO = 'mimo'
-    PARAM_NRARFCN = 'nr_arfcn'
-    PARAM_SCHEDULING = "scheduling"
-    PARAM_SCHEDULING_DYNAMIC = "dynamic"
-    PARAM_SCHEDULING_STATIC = "static"
-    PARAM_UL_MCS = 'ulmcs'
-    PARAM_UL_RBS = 'ul_rbs'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-        parameters to None.
-        Args:
-            log: logger object.
-        """
-        super().__init__(log)
-        self.band = None
-        self.bandwidth = None
-        self.dl_rbs = None
-        self.ul_rbs = None
-        self.dl_mcs = None
-        self.ul_mcs = None
-        self.mac_padding = None
-        self.mimo_mode = None
-        self.nr_arfcn = None
-
-    def configure(self, parameters):
-        """ Configures an NR cell using a dictionary of parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-        nr_band = parameters[self.PARAM_BAND]
-        if nr_band[0] == 'n':
-            nr_band = nr_band[1:]
-        self.band = nr_band
-
-        if self.PARAM_NRARFCN in parameters:
-            self.nr_arfcn = int(parameters[self.PARAM_NRARFCN])
-
-        if self.PARAM_BW not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter {} with an "
-                "int value (to indicate 1.4 MHz use 14).".format(
-                    self.PARAM_BW))
-        bw = float(parameters[self.PARAM_BW])
-
-        if abs(bw - 14) < 0.00000000001:
-            bw = 1.4
-
-        self.bandwidth = bw
-
-        # Setup mimo mode
-        if self.PARAM_MIMO not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter '{}' with the "
-                "mimo mode.".format(self.PARAM_MIMO))
-
-        for mimo_mode in lte_sim.MimoMode:
-            if parameters[self.PARAM_MIMO] == mimo_mode.value:
-                self.mimo_mode = mimo_mode
-                break
-        else:
-            raise ValueError("The value of {} must be one of the following:"
-                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
-
-        if self.PARAM_SCHEDULING not in parameters:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-            self.log.warning(
-                "The test config does not include the '{}' key. Setting to "
-                "static by default.".format(self.PARAM_SCHEDULING))
-        elif parameters[
-                self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_DYNAMIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.DYNAMIC
-        elif parameters[self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_STATIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-        else:
-            raise ValueError("Key '{}' must have a value of "
-                             "'dynamic' or 'static'.".format(
-                                 self.PARAM_SCHEDULING))
-
-        if self.scheduling_mode == lte_sim.SchedulingMode.STATIC:
-
-            if self.PARAM_PADDING not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set. Enabling MAC padding by "
-                    "default.".format(self.PARAM_PADDING))
-                self.mac_padding = True
-
-            if self.PARAM_DL_MCS in parameters:
-                self.dl_mcs = int(parameters[self.PARAM_DL_MCS])
-
-            if self.PARAM_UL_MCS in parameters:
-                self.ul_mcs = int(parameters[self.PARAM_UL_MCS])
-
-            # Temporary setting: use 273 RBs for a bandwidth of 100 MHz
-            self.dl_rbs = 273
-            self.ul_rbs = 273
-
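A minimal, hypothetical usage sketch (values are illustrative; the key strings follow the PARAM_* constants above, and the MIMO string is assumed to match the MimoMode enum values named in the error message):

```python
# Hypothetical NR cell parameters; configure() strips the leading 'n'
# from the band and, under static scheduling, forces 273 RBs.
nr_parameters = {
    'band': 'n78',
    'bw': 100,
    'mimo': '2x2',
    'scheduling': 'static',
    'dlmcs': 27,
    'ulmcs': 27,
}

nr_cell = NrCellConfig(log)  # 'log' is assumed to be a logger handle
nr_cell.configure(nr_parameters)
```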
-    def __str__(self):
-        return str(vars(self))
diff --git a/src/antlion/controllers/cellular_lib/OWNERS b/src/antlion/controllers/cellular_lib/OWNERS
deleted file mode 100644
index f88a96c..0000000
--- a/src/antlion/controllers/cellular_lib/OWNERS
+++ /dev/null
@@ -1,8 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
-
-per-file PresetSimulation.py = hmtuan@google.com
-per-file PresetSimulation.py = harjani@google.com
-per-file PresetSimulation.py = jethier@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/cellular_lib/PresetSimulation.py b/src/antlion/controllers/cellular_lib/PresetSimulation.py
deleted file mode 100644
index f1f649b..0000000
--- a/src/antlion/controllers/cellular_lib/PresetSimulation.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class PresetSimulation(BaseSimulation):
-    """5G preset simulation.
-
-    The simulation is configured by importing an SCPI config file
-    instead of setting individual parameters.
-    """
-
-    # Keys to obtain settings from the test_config dictionary.
-    KEY_CELL_INFO = "cell_info"
-    KEY_SCPI_FILE_NAME = "scpi_file"
-
-    def __init__(self,
-                 simulator,
-                 log,
-                 dut,
-                 test_config,
-                 calibration_table,
-                 nr_mode=None):
-        """Initializes the simulator for 5G preset simulation.
-
-        Args:
-            simulator: a cellular simulator controller.
-            log: a logger handle.
-            dut: a device handler implementing BaseCellularDut.
-            test_config: test configuration obtained from the config file.
-            calibration_table: a dictionary containing path losses
-                for different bands.
-        """
-
-        super().__init__(simulator, log, dut, test_config, calibration_table,
-                         nr_mode)
-
-        # Set to Keysight APN
-        log.info('Configuring APN.')
-        self.dut.set_apn('Keysight', 'Keysight')
-        self.num_carriers = None
-
-        # Enable roaming on the phone
-        self.dut.toggle_data_roaming(True)
-
-        # Force the device to NR/LTE so that it connects faster
-        try:
-            self.dut.set_preferred_network_type(
-                BaseCellularDut.PreferredNetworkType.NR_LTE)
-        except Exception as e:
-            # If this fails the test should be able to run anyways, even if it
-            # takes longer to find the cell.
-            self.log.warning('Setting preferred RAT failed: ' + str(e))
-
-    def setup_simulator(self):
-        """Do initial configuration in the simulator. """
-        self.log.info('This simulation does not require initial setup.')
-
-    def configure(self, parameters):
-        """Configures simulation by importing scpi file.
-
-        A pre-made SCPI file include all the essential configuration
-        for the simulation is imported by send SCPI import command
-        to the callbox.
-
-        Args:
-            parameters: a list of configuration dictionaries; the first entry
-                must include the SCPI file path and the cell info.
-        """
-        scpi_file = parameters[0][self.KEY_SCPI_FILE_NAME]
-        cell_infos = parameters[0][self.KEY_CELL_INFO]
-
-        self.log.info('Configure test scenario with\n' +
-                      f' SCPI config file: {scpi_file}\n' +
-                      f' cell info: {cell_infos}')
-
-        self.simulator.import_configuration(scpi_file)
-        self.simulator.set_cell_info(cell_infos)
-
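A hedged sketch of the parameters this preset mode expects (the file path and cell names are placeholders, and 'preset_simulation' is an already-created instance):

```python
# Hypothetical configuration for the SCPI-preset simulation mode.
preset_parameters = [{
    'scpi_file': '/path/to/preset_scenario.scpi',  # KEY_SCPI_FILE_NAME
    'cell_info': ['LTE Cell 1', 'NR Cell 1'],      # KEY_CELL_INFO
}]

preset_simulation.configure(preset_parameters)
```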
-    def start(self):
-        """Start simulation.
-
-        Waits for the DUT to connect to the callbox.
-
-        Raises:
-            RuntimeError: the simulation failed to start because the DUT
-                could not connect to the cells.
-        """
-
-        try:
-            self.attach()
-        except Exception as exc:
-            raise RuntimeError('Simulation failed to start.') from exc
-
-    def attach(self):
-        """Attach UE to the callbox.
-
-        Toggles airplane mode on and off and waits for a specified timeout,
-        repeating until the UE connects to the callbox.
-
-        Raises:
-            RuntimeError: attaching failed because the DUT could not
-                connect to the cells.
-        """
-        try:
-            self.simulator.wait_until_attached(self.dut, self.attach_timeout,
-                                               self.attach_retries)
-        except Exception as exc:
-            raise RuntimeError('Could not attach to base station.') from exc
-
-    def calibrated_downlink_rx_power(self, bts_config, rsrp):
-        """Convert RSRP to total signal power from the basestation.
-
-        Args:
-            bts_config: the current configuration at the base station
-            rsrp: desired rsrp, contained in a key value pair
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """Computes downlink path loss and returns the calibration value.
-
-        See base class implementation for details.
-
-        Args:
-            rat: ignored, replaced by 'lteRsrp'.
-            power_units_conversion_func: ignored, replaced by
-                self.rsrp_to_signal_power.
-
-        Returns:
-            Downlink calibration value and measured DL power. Note that the
-            phone only reports RSRP of the primary chain
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def rsrp_to_signal_power(self, rsrp, bts_config):
-        """Converts rsrp to total band signal power
-
-        RSRP is measured per subcarrier, so total band power needs to be
-        multiplied by the number of subcarriers being used.
-
-        Args:
-            rsrp: desired rsrp in dBm.
-            bts_config: a base station configuration object.
-
-        Returns:
-            Total band signal power in dBm
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def maximum_downlink_throughput(self):
-        """Calculates maximum achievable downlink throughput in.
-
-        The calculation is based on the current simulation state
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def bts_maximum_downlink_throughtput(self, bts_config):
-        """Calculates maximum achievable downlink throughput for a single
-
-        base station from its configuration object.
-
-        Args:
-            bts_config: a base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def maximum_uplink_throughput(self):
-        """Calculates maximum achievable uplink throughput.
-
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def bts_maximum_uplink_throughtput(self, bts_config):
-        """Calculates maximum achievable uplink throughput
-
-        The calculation is for selected basestation
-        from its configuration object.
-        Args:
-            bts_config: an LTE base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def calibrate(self, band):
-        """Calculates UL and DL path loss if it wasn't done before
-
-        Before running the base class implementation, configure the base station
-        to only use one downlink antenna with maximum bandwidth.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def start_traffic_for_calibration(self):
-        """If MAC padding is enabled, there is no need to start IP traffic. """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def stop_traffic_for_calibration(self):
-        """If MAC padding is enabled, IP traffic wasn't started. """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
-        """Calculates UL power.
-
-        The calculation is based on measurements from the callbox
-        and the calibration data.
-
-        Args:
-            samples: the number of samples to average
-            wait_after_sample: time in seconds to wait in between samples
-
-        Returns:
-            the UL power at the UE antenna ports in dBm
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
diff --git a/src/antlion/controllers/cellular_lib/UmtsSimulation.py b/src/antlion/controllers/cellular_lib/UmtsSimulation.py
deleted file mode 100644
index 316186f..0000000
--- a/src/antlion/controllers/cellular_lib/UmtsSimulation.py
+++ /dev/null
@@ -1,275 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ntpath
-import time
-
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsusim
-from antlion.controllers.anritsu_lib.md8475a import BtsNumber
-from antlion.controllers.anritsu_lib.md8475a import BtsPacketRate
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class UmtsSimulation(BaseSimulation):
-    """ Single base station simulation. """
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-
-    UMTS_BASIC_SIM_FILE = 'SIM_default_WCDMA.wnssp'
-
-    UMTS_R99_CELL_FILE = 'CELL_WCDMA_R99_config.wnscp'
-
-    UMTS_R7_CELL_FILE = 'CELL_WCDMA_R7_config.wnscp'
-
-    UMTS_R8_CELL_FILE = 'CELL_WCDMA_R8_config.wnscp'
-
-    # Configuration dictionary keys
-    PARAM_RELEASE_VERSION = "r"
-    PARAM_RELEASE_VERSION_99 = "99"
-    PARAM_RELEASE_VERSION_8 = "8"
-    PARAM_RELEASE_VERSION_7 = "7"
-    PARAM_BAND = "band"
-    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
-
-    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
-    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSCP"
-
-    # RSCP signal levels thresholds (as reported by Android). Units are dBm
-    # Using LTE thresholds + 24 dB to have equivalent SPD
-    # 24 dB comes from 10 * log10(3.84 MHz / 15 KHz)
-
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'excellent': -51,
-        'high': -76,
-        'medium': -86,
-        'weak': -96
-    }
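A one-line check of the 24 dB figure quoted above (the ratio of the 3.84 MHz WCDMA bandwidth to the 15 kHz LTE subcarrier spacing, in dB):

```python
import math

offset_db = 10 * math.log10(3.84e6 / 15e3)  # ~24.08 dB
```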
-
-    # Transmitted output power for the phone
-    # Stronger Tx power means that the signal received by the BTS is weaker
-    # Units are dBm
-
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'low': -20,
-        'medium': 8,
-        'high': 15,
-        'max': 23
-    }
-
-    # Converts packet rate to the throughput that can be actually obtained in
-    # Mbits/s
-
-    packet_rate_to_dl_throughput = {
-        BtsPacketRate.WCDMA_DL384K_UL64K: 0.362,
-        BtsPacketRate.WCDMA_DL21_6M_UL5_76M: 18.5,
-        BtsPacketRate.WCDMA_DL43_2M_UL5_76M: 36.9
-    }
-
-    packet_rate_to_ul_throughput = {
-        BtsPacketRate.WCDMA_DL384K_UL64K: 0.0601,
-        BtsPacketRate.WCDMA_DL21_6M_UL5_76M: 5.25,
-        BtsPacketRate.WCDMA_DL43_2M_UL5_76M: 5.25
-    }
-
-    def __init__(self, simulator, log, dut, test_config, calibration_table):
-        """ Initializes the cellular simulator for a UMTS simulation.
-
-        Loads a simple UMTS simulation environment with 1 basestation. It also
-        creates the BTS handle so we can change the parameters as desired.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-        # The UMTS simulation relies on the cellular simulator to be a MD8475
-        if not isinstance(simulator, anritsusim.MD8475CellularSimulator):
-            raise ValueError('The UMTS simulation relies on the simulator to '
-                             'be an Anritsu MD8475 A/B instrument.')
-
-        # The Anritsu controller needs to be unwrapped before calling
-        # super().__init__ because setup_simulator() requires self.anritsu and
-        # will be called during the parent class initialization.
-        self.anritsu = simulator.anritsu
-        self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
-
-        super().__init__(simulator, log, dut, test_config, calibration_table)
-
-        self.dut.set_preferred_network_type(
-            BaseCellularDut.PreferredNetworkType.WCDMA_ONLY)
-
-        self.release_version = None
-        self.packet_rate = None
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-
-        # Load callbox config files
-        self.callbox_config_path = self.CALLBOX_PATH_FORMAT_STR.format(
-            self.anritsu._md8475_version)
-
-        self.anritsu.load_simulation_paramfile(
-            ntpath.join(self.callbox_config_path, self.UMTS_BASIC_SIM_FILE))
-
-        # Start simulation if it wasn't started
-        self.anritsu.start_simulation()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes UMTS configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        super().configure(parameters)
-
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-
-        self.set_band(self.bts1, parameters[self.PARAM_BAND])
-        self.load_pathloss_if_required()
-
-        # Setup release version
-        if (self.PARAM_RELEASE_VERSION not in parameters
-                or parameters[self.PARAM_RELEASE_VERSION] not in [
-                    self.PARAM_RELEASE_VERSION_7, self.PARAM_RELEASE_VERSION_8,
-                    self.PARAM_RELEASE_VERSION_99
-                ]):
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with a "
-                "valid release version.".format(self.PARAM_RELEASE_VERSION))
-
-        self.set_release_version(self.bts1,
-                                 parameters[self.PARAM_RELEASE_VERSION])
-
-        # Setup W-CDMA RRC status change and CELL_DCH timer for idle test case
-        if self.PARAM_RRC_STATUS_CHANGE_TIMER not in parameters:
-            self.log.info(
-                "The config dictionary does not include a '{}' key. Disabled "
-                "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
-            self.anritsu.set_umts_rrc_status_change(False)
-        else:
-            self.rrc_sc_timer = int(
-                parameters[self.PARAM_RRC_STATUS_CHANGE_TIMER])
-            self.anritsu.set_umts_rrc_status_change(True)
-            self.anritsu.set_umts_dch_stat_timer(self.rrc_sc_timer)
-
-    def set_release_version(self, bts, release_version):
-        """ Sets the release version.
-
-        Loads the cell parameter file matching the requested release version.
-        Does nothing if the release version is already the one requested.
-
-        """
-
-        if release_version == self.release_version:
-            self.log.info(
-                "Release version is already {}.".format(release_version))
-            return
-        if release_version == self.PARAM_RELEASE_VERSION_99:
-
-            cell_parameter_file = self.UMTS_R99_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL384K_UL64K
-
-        elif release_version == self.PARAM_RELEASE_VERSION_7:
-
-            cell_parameter_file = self.UMTS_R7_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL21_6M_UL5_76M
-
-        elif release_version == self.PARAM_RELEASE_VERSION_8:
-
-            cell_parameter_file = self.UMTS_R8_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL43_2M_UL5_76M
-
-        else:
-            raise ValueError("Invalid UMTS release version number.")
-
-        self.anritsu.load_cell_paramfile(
-            ntpath.join(self.callbox_config_path, cell_parameter_file))
-
-        self.release_version = release_version
-
-        # Loading a cell parameter file stops the simulation
-        self.start()
-
-        bts.packet_rate = self.packet_rate
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        if self.packet_rate not in self.packet_rate_to_dl_throughput:
-            raise NotImplementedError("Packet rate not contained in the "
-                                      "throughput dictionary.")
-        return self.packet_rate_to_dl_throughput[self.packet_rate]
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable uplink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        if self.packet_rate not in self.packet_rate_to_ul_throughput:
-            raise NotImplementedError("Packet rate not contained in the "
-                                      "throughput dictionary.")
-        return self.packet_rate_to_ul_throughput[self.packet_rate]
-
-    def set_downlink_rx_power(self, bts, signal_level):
-        """ Starts IP data traffic while setting downlink power.
-
-        This is only necessary for UMTS for unclear reasons. b/139026916 """
-
-        # Starts IP traffic while changing this setting to force the UE to be
-        # in Communication state, as UL power cannot be set in Idle state
-        self.start_traffic_for_calibration()
-
-        # Wait until it goes to communication state
-        self.anritsu.wait_for_communication_state()
-
-        super().set_downlink_rx_power(bts, signal_level)
-
-        # Stop IP traffic after setting the signal level
-        self.stop_traffic_for_calibration()
-
-    def set_band(self, bts, band):
-        """ Sets the band used for communication.
-
-        Args:
-            bts: basestation handle
-            band: desired band
-        """
-
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
diff --git a/src/antlion/controllers/cellular_lib/__init__.py b/src/antlion/controllers/cellular_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/cellular_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/cellular_simulator.py b/src/antlion/controllers/cellular_simulator.py
deleted file mode 100644
index 30eeba3..0000000
--- a/src/antlion/controllers/cellular_simulator.py
+++ /dev/null
@@ -1,489 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import logger
-from antlion.controllers import cellular_lib
-
-
-class AbstractCellularSimulator:
-    """ A generic cellular simulator controller class that can be derived to
-    implement equipment specific classes and allows the tests to be implemented
-    without depending on a singular instrument model.
-
-    This class defines the interface that every cellular simulator controller
-    needs to implement and shouldn't be instantiated by itself. """
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = None
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = None
-
-    def __init__(self):
-        """ Initializes the cellular simulator. """
-        self.log = logger.create_tagged_trace_logger('CellularSimulator')
-        self.num_carriers = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        raise NotImplementedError()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        raise NotImplementedError()
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated CA combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        raise NotImplementedError()
-
-    def configure_bts(self, config, bts_index=0):
-        """ Commands the equipment to setup a base station with the required
-        configuration. This method applies configurations that are common to all
-        RATs.
-
-        Args:
-            config: a BaseSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        self.log.info('The config for {} is {}'.format(bts_index, str(config)))
-
-        if config.output_power:
-            self.set_output_power(bts_index, config.output_power)
-
-        if config.input_power:
-            self.set_input_power(bts_index, config.input_power)
-
-        if isinstance(config, cellular_lib.LteCellConfig.LteCellConfig):
-            self.configure_lte_bts(config, bts_index)
-
-        if isinstance(config, cellular_lib.NrCellConfig.NrCellConfig):
-            self.configure_nr_bts(config, bts_index)
-
-    def configure_lte_bts(self, config, bts_index=0):
-        """ Commands the equipment to setup an LTE base station with the
-        required configuration.
-
-        Args:
-            config: an LteSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        if config.band:
-            self.set_band(bts_index, config.band)
-
-        if config.dlul_config:
-            self.set_tdd_config(bts_index, config.dlul_config)
-
-        if config.ssf_config:
-            self.set_ssf_config(bts_index, config.ssf_config)
-
-        if config.bandwidth:
-            self.set_bandwidth(bts_index, config.bandwidth)
-
-        if config.dl_channel:
-            self.set_downlink_channel_number(bts_index, config.dl_channel)
-
-        if config.mimo_mode:
-            self.set_mimo_mode(bts_index, config.mimo_mode)
-
-        if config.transmission_mode:
-            self.set_transmission_mode(bts_index, config.transmission_mode)
-
-        # Modulation order should be set before set_scheduling_mode is
-        # called.
-        if config.dl_256_qam_enabled is not None:
-            self.set_dl_256_qam_enabled(bts_index, config.dl_256_qam_enabled)
-
-        if config.ul_64_qam_enabled is not None:
-            self.set_ul_64_qam_enabled(bts_index, config.ul_64_qam_enabled)
-
-        if config.scheduling_mode:
-
-            if (config.scheduling_mode ==
-                    cellular_lib.LteSimulation.SchedulingMode.STATIC
-                    and not (config.dl_rbs and config.ul_rbs and config.dl_mcs
-                             and config.ul_mcs)):
-                raise ValueError('When the scheduling mode is set to static, '
-                                 'the RB and MCS parameters are required.')
-
-            # If scheduling mode is set to Dynamic, the RB and MCS parameters
-            # will be ignored by set_scheduling_mode.
-            self.set_scheduling_mode(bts_index, config.scheduling_mode,
-                                     config.dl_mcs, config.ul_mcs,
-                                     config.dl_rbs, config.ul_rbs)
-
-        # This variable stores a boolean value so the following is needed to
-        # differentiate False from None
-        if config.mac_padding is not None:
-            self.set_mac_padding(bts_index, config.mac_padding)
-
-        if config.cfi:
-            self.set_cfi(bts_index, config.cfi)
-
-        if config.paging_cycle:
-            self.set_paging_cycle(bts_index, config.paging_cycle)
-
-        if config.phich:
-            self.set_phich_resource(bts_index, config.phich)
-
-        if config.drx_connected_mode:
-            self.set_drx_connected_mode(bts_index, config.drx_connected_mode)
-
-            if config.drx_on_duration_timer:
-                self.set_drx_on_duration_timer(bts_index,
-                                               config.drx_on_duration_timer)
-
-            if config.drx_inactivity_timer:
-                self.set_drx_inactivity_timer(bts_index,
-                                              config.drx_inactivity_timer)
-
-            if config.drx_retransmission_timer:
-                self.set_drx_retransmission_timer(
-                    bts_index, config.drx_retransmission_timer)
-
-            if config.drx_long_cycle:
-                self.set_drx_long_cycle(bts_index, config.drx_long_cycle)
-
-            if config.drx_long_cycle_offset is not None:
-                self.set_drx_long_cycle_offset(bts_index,
-                                               config.drx_long_cycle_offset)
-
-    def configure_nr_bts(self, config, bts_index=1):
-        """ Commands the equipment to setup an LTE base station with the
-        required configuration.
-
-        Args:
-            config: an NrCellConfig.NrCellConfig object.
-            bts_index: the base station number.
-        """
-        if config.band:
-            self.set_band(bts_index, config.band)
-
-        if config.nr_arfcn:
-            self.set_downlink_channel_number(bts_index, config.nr_arfcn)
-
-        if config.bandwidth:
-            self.set_bandwidth(bts_index, config.bandwidth)
-
-        if config.mimo_mode:
-            self.set_mimo_mode(bts_index, config.mimo_mode)
-
-        if config.scheduling_mode:
-
-            if (config.scheduling_mode ==
-                    cellular_lib.LteSimulation.SchedulingMode.STATIC
-                    and not (config.dl_rbs and config.ul_rbs and config.dl_mcs
-                             and config.ul_mcs)):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            # If scheduling mode is set to Dynamic, the RB and MCS parameters
-            # will be ignored by set_scheduling_mode.
-            self.set_scheduling_mode(bts_index, config.scheduling_mode,
-                                     config.dl_mcs, config.ul_mcs,
-                                     config.dl_rbs, config.ul_rbs)
-        if config.mac_padding is not None:
-            self.set_mac_padding(bts_index, config.mac_padding)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        raise NotImplementedError()
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        raise NotImplementedError()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        raise NotImplementedError()
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        raise NotImplementedError()
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number
-        """
-        raise NotImplementedError()
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        raise NotImplementedError()
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        raise NotImplementedError()
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        raise NotImplementedError()
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        raise NotImplementedError()
-
-    def set_transmission_mode(self, bts_index, transmission_mode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            transmission_mode: the new transmission mode
-        """
-        raise NotImplementedError()
-
-    def set_scheduling_mode(self, bts_index, scheduling_mode, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            scheduling_mode: the new scheduling mode
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-        raise NotImplementedError()
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        raise NotImplementedError()
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        raise NotImplementedError()
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        raise NotImplementedError()
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        raise NotImplementedError()
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        raise NotImplementedError()
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        raise NotImplementedError()
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the time interval to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        raise NotImplementedError()
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        raise NotImplementedError()
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait before entering DRX mode
-        """
-        raise NotImplementedError()
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        raise NotImplementedError()
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The number of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration.
-        """
-        raise NotImplementedError()
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long DRX cycle.
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        raise NotImplementedError()
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-                the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        raise NotImplementedError()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        raise NotImplementedError()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        raise NotImplementedError()
-
-
-class CellularSimulatorError(Exception):
-    """ Exceptions thrown when the cellular equipment is unreachable or it
-    returns an error after receiving a command. """
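The configure_lte_bts method removed above follows one consistent pattern: each optional field of the cell configuration is applied only when it is set, and boolean fields are compared against None so that an explicit False still takes effect. A minimal sketch of that dispatch pattern follows; the LteConfig and FakeSimulator names are hypothetical illustrations, not antlion APIs.

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class LteConfig:
        """Hypothetical stand-in for the LTE cell config consumed above."""
        band: Optional[int] = None
        bandwidth: Optional[float] = None
        mac_padding: Optional[bool] = None  # False must stay distinguishable from None


    class FakeSimulator:
        """Hypothetical simulator that only logs the settings it applies."""

        def set_band(self, bts_index: int, band: int) -> None:
            print(f"BTS {bts_index}: band {band}")

        def set_bandwidth(self, bts_index: int, bandwidth: float) -> None:
            print(f"BTS {bts_index}: bandwidth {bandwidth} MHz")

        def set_mac_padding(self, bts_index: int, mac_padding: bool) -> None:
            print(f"BTS {bts_index}: MAC padding {mac_padding}")

        def configure_lte_bts(self, config: LteConfig, bts_index: int = 0) -> None:
            # Apply only the fields that were provided in the config.
            if config.band:
                self.set_band(bts_index, config.band)
            if config.bandwidth:
                self.set_bandwidth(bts_index, config.bandwidth)
            # Compare against None so an explicit False is still applied.
            if config.mac_padding is not None:
                self.set_mac_padding(bts_index, config.mac_padding)


    FakeSimulator().configure_lte_bts(LteConfig(band=7, mac_padding=False))
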
diff --git a/src/antlion/controllers/chameleon_controller.py b/src/antlion/controllers/chameleon_controller.py
deleted file mode 100644
index 7f8ce1a..0000000
--- a/src/antlion/controllers/chameleon_controller.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import xmlrpc.client
-from subprocess import call
-
-from antlion import signals
-
-MOBLY_CONTROLLER_CONFIG_NAME = "ChameleonDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "chameleon_devices"
-
-CHAMELEON_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-CHAMELEON_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-
-audio_bus_endpoints = {
-    'CROS_HEADPHONE': 'Cros device headphone',
-    'CROS_EXTERNAL_MICROPHONE': 'Cros device external microphone',
-    'PERIPHERAL_MICROPHONE': 'Peripheral microphone',
-    'PERIPHERAL_SPEAKER': 'Peripheral speaker',
-    'FPGA_LINEOUT': 'Chameleon FPGA line-out',
-    'FPGA_LINEIN': 'Chameleon FPGA line-in',
-    'BLUETOOTH_OUTPUT': 'Bluetooth module output',
-    'BLUETOOTH_INPUT': 'Bluetooth module input'
-}
-
-
-class ChameleonDeviceError(signals.ControllerError):
-    pass
-
-
-def create(configs):
-    if not configs:
-        raise ChameleonDeviceError(CHAMELEON_DEVICE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise ChameleonDeviceError(CHAMELEON_DEVICE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of IP addresses
-        chameleons = get_instances(configs)
-    return chameleons
-
-
-def destroy(chameleons):
-    for chameleon in chameleons:
-        del chameleon
-
-
-def get_info(chameleons):
-    """Get information on a list of ChameleonDevice objects.
-
-    Args:
-        chameleons: A list of ChameleonDevice objects.
-
-    Returns:
-        A list of dicts, each representing info for a ChameleonDevice object.
-    """
-    device_info = []
-    for chameleon in chameleons:
-        info = {"address": chameleon.address, "port": chameleon.port}
-        device_info.append(info)
-    return device_info
-
-
-def get_instances(ips):
-    """Create ChameleonDevice instances from a list of IPs.
-
-    Args:
-        ips: A list of Chameleon IPs.
-
-    Returns:
-        A list of ChameleonDevice objects.
-    """
-    return [ChameleonDevice(ip) for ip in ips]
-
-
-class ChameleonDevice:
-    """Class representing a Chameleon device.
-
-    Each object of this class represents one Chameleon device in ACTS.
-
-    Attributes:
-        address: The full address to contact the Chameleon device at
-        client: The ServiceProxy of the XMLRPC client.
-        log: A logger object.
-        port: The TCP port number of the Chameleon device.
-    """
-
-    def __init__(self, ip="", port=9992):
-        self.ip = ip
-        self.log = logging.getLogger()
-        self.port = port
-        self.address = "http://{}:{}".format(ip, self.port)
-        try:
-            self.client = xmlrpc.client.ServerProxy(self.address,
-                                                    allow_none=True,
-                                                    verbose=False)
-        except ConnectionRefusedError as err:
-            self.log.exception(
-                "Failed to connect to Chameleon Device at: {}".format(
-                    self.address))
-        self.client.Reset()
-
-    def pull_file(self, chameleon_location, destination):
-        """Pulls a file from the Chameleon device. Usually the raw audio file.
-
-        Args:
-            chameleon_location: The path to the file on the Chameleon device
-            destination: The destination to where to pull it locally.
-        """
-        # TODO: (tturney) implement
-        self.log.error("Definition not yet implemented")
-
-    def start_capturing_audio(self, port_id, has_file=True):
-        """Starts capturing audio.
-
-        Args:
-            port_id: The ID of the audio input port.
-            has_file: True for saving audio data to file. False otherwise.
-        """
-        self.client.StartCapturingAudio(port_id, has_file)
-
-    def stop_capturing_audio(self, port_id):
-        """Stops capturing audio.
-
-        Args:
-            port_id: The ID of the audio input port.
-        Returns:
-            List containing the location of the recorded audio and a dictionary
-            of values relating to the raw audio including: file_type, channel,
-            sample_format, and rate.
-        """
-        return self.client.StopCapturingAudio(port_id)
-
-    def audio_board_connect(self, bus_number, endpoint):
-        """Connects an endpoint to an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-            endpoint: An endpoint defined in audio_bus_endpoints.
-        """
-        self.client.AudioBoardConnect(bus_number, endpoint)
-
-    def audio_board_disconnect(self, bus_number, endpoint):
-        """Connects an endpoint to an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-            endpoint: An endpoint defined in audio_bus_endpoints.
-        """
-        self.client.AudioBoardDisconnect(bus_number, endpoint)
-
-    def audio_board_disable_bluetooth(self):
-        """Disables Bluetooth module on audio board."""
-        self.client.AudioBoardDisableBluetooth()
-
-    def audio_board_clear_routes(self, bus_number):
-        """Clears routes on an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-        """
-        self.client.AudioBoardClearRoutes(bus_number)
-
-    def scp(self, source, destination):
-        """Copies files from the Chameleon device to the host machine.
-
-        Args:
-            source: The file path on the Chameleon board.
-            destination: The file path on the host machine.
-        """
-        cmd = "scp root@{}:/{} {}".format(self.ip, source, destination)
-        try:
-            call(cmd.split(" "))
-        except FileNotFoundError as err:
-            self.log.exception("File not found {}".format(source))
diff --git a/src/antlion/controllers/fastboot.py b/src/antlion/controllers/fastboot.py
index 0b889fa..ed67245 100755
--- a/src/antlion/controllers/fastboot.py
+++ b/src/antlion/controllers/fastboot.py
@@ -30,12 +30,12 @@
         self.ret_code = ret_code
 
     def __str__(self):
-        return ("Error executing fastboot cmd '%s'. ret: %d, stdout: %s,"
-                " stderr: %s") % (self.cmd, self.ret_code, self.stdout,
-                                  self.stderr)
+        return (
+            "Error executing fastboot cmd '%s'. ret: %d, stdout: %s," " stderr: %s"
+        ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
 
 
-class FastbootProxy():
+class FastbootProxy:
     """Proxy class for fastboot.
 
     For syntactic reasons, the '-' in fastboot commands need to be replaced
@@ -52,16 +52,10 @@
             self.fastboot_str = "fastboot"
         self.ssh_connection = ssh_connection
 
-    def _exec_fastboot_cmd(self,
-                           name,
-                           arg_str,
-                           ignore_status=False,
-                           timeout=60):
-        command = ' '.join((self.fastboot_str, name, arg_str))
+    def _exec_fastboot_cmd(self, name, arg_str, ignore_status=False, timeout=60):
+        command = " ".join((self.fastboot_str, name, arg_str))
         if self.ssh_connection:
-            result = self.connection.run(command,
-                                         ignore_status=True,
-                                         timeout=timeout)
+            result = self.connection.run(command, ignore_status=True, timeout=timeout)
         else:
             result = job.run(command, ignore_status=True, timeout=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
@@ -72,18 +66,15 @@
         if ret == 0 or ignore_status:
             return out
         else:
-            raise FastbootError(cmd=command,
-                                stdout=out,
-                                stderr=err,
-                                ret_code=ret)
+            raise FastbootError(cmd=command, stdout=out, stderr=err, ret_code=ret)
 
     def args(self, *args, **kwargs):
-        return job.run(' '.join((self.fastboot_str, ) + args), **kwargs).stdout
+        return job.run(" ".join((self.fastboot_str,) + args), **kwargs).stdout
 
     def __getattr__(self, name):
         def fastboot_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            arg_str = ' '.join(str(elem) for elem in args)
+            clean_name = name.replace("_", "-")
+            arg_str = " ".join(str(elem) for elem in args)
             return self._exec_fastboot_cmd(clean_name, arg_str, **kwargs)
 
         return fastboot_call
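FastbootProxy relies on __getattr__ to turn arbitrary attribute access into fastboot subcommands, mapping underscores to dashes so that, for example, proxy.reboot_bootloader() runs "fastboot reboot-bootloader". The following self-contained sketch shows that dispatch pattern; the CommandProxy class is hypothetical and only builds and prints the command line instead of executing fastboot.

    class CommandProxy:
        """Builds CLI command strings from attribute access (illustration only)."""

        def __init__(self, binary: str) -> None:
            self.binary = binary

        def __getattr__(self, name: str):
            def call(*args) -> str:
                # Python identifiers cannot contain '-', so map '_' back to '-'.
                subcommand = name.replace("_", "-")
                command = " ".join([self.binary, subcommand, *map(str, args)])
                print(f"would run: {command}")
                return command

            return call


    proxy = CommandProxy("fastboot")
    proxy.devices()            # would run: fastboot devices
    proxy.reboot_bootloader()  # would run: fastboot reboot-bootloader
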
diff --git a/src/antlion/controllers/fuchsia_device.py b/src/antlion/controllers/fuchsia_device.py
index c0d62c7..414afb4 100644
--- a/src/antlion/controllers/fuchsia_device.py
+++ b/src/antlion/controllers/fuchsia_device.py
@@ -27,16 +27,26 @@
 from antlion import logger as acts_logger
 from antlion import signals
 from antlion import utils
+from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig, SSHError
 from antlion.controllers import pdu
 from antlion.controllers.fuchsia_lib.ffx import FFX
-from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import NetstackController
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import WlanController
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import WlanPolicyController
+from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import (
+    NetstackController,
+)
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
+    WlanController,
+)
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyController,
+)
 from antlion.controllers.fuchsia_lib.package_server import PackageServer
 from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import DEFAULT_SSH_PORT, DEFAULT_SSH_PRIVATE_KEY, DEFAULT_SSH_USER, SSHConfig, SSHProvider, FuchsiaSSHError
+from antlion.controllers.fuchsia_lib.ssh import (
+    DEFAULT_SSH_PRIVATE_KEY,
+    DEFAULT_SSH_USER,
+    FuchsiaSSHProvider,
+)
 from antlion.controllers.fuchsia_lib.utils_lib import flash
-from antlion.libs.proc import job
 from antlion.utils import get_fuchsia_mdns_ipv6_address, get_interface_ip_addresses
 
 MOBLY_CONTROLLER_CONFIG_NAME = "FuchsiaDevice"
@@ -44,8 +54,10 @@
 
 FUCHSIA_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
 FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-FUCHSIA_DEVICE_INVALID_CONFIG = ("Fuchsia device config must be either a str "
-                                 "or dict. abort! Invalid element %i in %r")
+FUCHSIA_DEVICE_INVALID_CONFIG = (
+    "Fuchsia device config must be either a str "
+    "or dict. abort! Invalid element %i in %r"
+)
 FUCHSIA_DEVICE_NO_IP_MSG = "No IP address specified, abort!"
 FUCHSIA_COULD_NOT_GET_DESIRED_STATE = "Could not %s %s."
 FUCHSIA_INVALID_CONTROL_STATE = "Invalid control state (%s). abort!"
@@ -62,9 +74,9 @@
 
 CHANNEL_OPEN_TIMEOUT = 5
 
-FUCHSIA_REBOOT_TYPE_SOFT = 'soft'
-FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = 'flash'
-FUCHSIA_REBOOT_TYPE_HARD = 'hard'
+FUCHSIA_REBOOT_TYPE_SOFT = "soft"
+FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = "flash"
+FUCHSIA_REBOOT_TYPE_HARD = "hard"
 
 FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
 FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60
@@ -72,11 +84,11 @@
 FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15
 
 FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
-FUCHSIA_DEFAULT_COUNTRY_CODE_US = 'US'
+FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US"
 
 MDNS_LOOKUP_RETRY_MAX = 3
 
-VALID_ASSOCIATION_MECHANISMS = {None, 'policy', 'drivers'}
+VALID_ASSOCIATION_MECHANISMS = {None, "policy", "drivers"}
 IP_ADDRESS_TIMEOUT = 15
 
 
@@ -97,8 +109,7 @@
         if isinstance(config, str):
             configs[index] = {"ip": config}
         elif not isinstance(config, dict):
-            raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG %
-                                     (index, configs))
+            raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG % (index, configs))
     return get_instances(configs)
 
 
@@ -158,15 +169,14 @@
         self.ip: str = fd_conf_data["ip"]
         self.orig_ip: str = fd_conf_data["ip"]
         self.sl4f_port: int = fd_conf_data.get("sl4f_port", 80)
-        self.ssh_username: str = fd_conf_data.get("ssh_username",
-                                                  DEFAULT_SSH_USER)
+        self.ssh_username: str = fd_conf_data.get("ssh_username", DEFAULT_SSH_USER)
         self.ssh_port: int = fd_conf_data.get("ssh_port", DEFAULT_SSH_PORT)
+        self.ssh_binary_path: str = fd_conf_data.get("ssh_binary_path", "ssh")
 
         def expand(path: str) -> str:
             return os.path.expandvars(os.path.expanduser(path))
 
-        def path_from_config(name: str,
-                             default: Optional[str] = None) -> Optional[str]:
+        def path_from_config(name: str, default: Optional[str] = None) -> Optional[str]:
             path = fd_conf_data.get(name, default)
             if not path:
                 return path
@@ -175,11 +185,13 @@
         def assert_exists(name: str, path: str) -> None:
             if not path:
                 raise FuchsiaDeviceError(
-                    f'Please specify "${name}" in your configuration file')
+                    f'Please specify "${name}" in your configuration file'
+                )
             if not os.path.exists(path):
                 raise FuchsiaDeviceError(
                     f'Please specify a correct "${name}" in your configuration '
-                    f'file: "{path}" does not exist')
+                    f'file: "{path}" does not exist'
+                )
 
         self.specific_image: Optional[str] = path_from_config("specific_image")
         if self.specific_image:
@@ -188,71 +200,76 @@
         # Path to a tar.gz archive with pm and amber-files, as necessary for
         # starting a package server.
         self.packages_archive_path: Optional[str] = path_from_config(
-            "packages_archive_path", None)
+            "packages_archive_path", None
+        )
         if self.packages_archive_path:
             assert_exists("packages_archive_path", self.packages_archive_path)
 
-        def required_path_from_config(name: str,
-                                      default: Optional[str] = None) -> str:
+        def required_path_from_config(name: str, default: Optional[str] = None) -> str:
             path = path_from_config(name, default)
             assert_exists(name, path)
             return path
 
         self.ssh_priv_key: str = required_path_from_config(
-            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY)
-        self.authorized_file: str = required_path_from_config(
-            "authorized_file_loc", f'{self.ssh_priv_key}.pub')
+            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY
+        )
         self.ffx_binary_path: str = required_path_from_config(
-            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx")
+            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx"
+        )
 
-        self.serial_number: Optional[str] = fd_conf_data.get(
-            "serial_number", None)
+        self.authorized_file: Optional[str] = fd_conf_data.get(
+            "authorized_file_loc", None
+        )
+        self.serial_number: Optional[str] = fd_conf_data.get("serial_number", None)
         self.device_type: Optional[str] = fd_conf_data.get("device_type", None)
-        self.product_type: Optional[str] = fd_conf_data.get(
-            "product_type", None)
+        self.product_type: Optional[str] = fd_conf_data.get("product_type", None)
         self.board_type: Optional[str] = fd_conf_data.get("board_type", None)
-        self.build_number: Optional[str] = fd_conf_data.get(
-            "build_number", None)
+        self.build_number: Optional[str] = fd_conf_data.get("build_number", None)
         self.build_type: Optional[str] = fd_conf_data.get("build_type", None)
 
         self.mdns_name: Optional[str] = fd_conf_data.get("mdns_name", None)
 
-        self.hard_reboot_on_fail: bool = fd_conf_data.get(
-            "hard_reboot_on_fail", False)
+        self.hard_reboot_on_fail: bool = fd_conf_data.get("hard_reboot_on_fail", False)
         self.take_bug_report_on_fail: bool = fd_conf_data.get(
-            "take_bug_report_on_fail", False)
+            "take_bug_report_on_fail", False
+        )
         self.device_pdu_config = fd_conf_data.get("PduDevice", None)
         self.config_country_code: str = fd_conf_data.get(
-            'country_code', FUCHSIA_DEFAULT_COUNTRY_CODE_US).upper()
+            "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US
+        ).upper()
 
         output_path = context.get_current_context().get_base_output_path()
-        self.ssh_config = os.path.join(output_path,
-                                       "ssh_config_{}".format(self.ip))
+        self.ssh_config = os.path.join(output_path, "ssh_config_{}".format(self.ip))
         self._generate_ssh_config(self.ssh_config)
 
         # WLAN interface info is populated inside configure_wlan
         self.wlan_client_interfaces = {}
         self.wlan_ap_interfaces = {}
         self.wlan_client_test_interface_name = fd_conf_data.get(
-            'wlan_client_test_interface', None)
+            "wlan_client_test_interface", None
+        )
         self.wlan_ap_test_interface_name = fd_conf_data.get(
-            'wlan_ap_test_interface', None)
-        self.wlan_features: List[str] = fd_conf_data.get('wlan_features', [])
+            "wlan_ap_test_interface", None
+        )
+        self.wlan_features: List[str] = fd_conf_data.get("wlan_features", [])
 
         # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
         # If set to None, wlan is not configured.
         self.association_mechanism = None
         # Defaults to policy layer, unless otherwise specified in the config
         self.default_association_mechanism = fd_conf_data.get(
-            'association_mechanism', 'policy')
+            "association_mechanism", "policy"
+        )
 
         # Whether to clear and preserve existing saved networks and client
         # connections state, to be restored at device teardown.
         self.default_preserve_saved_networks = fd_conf_data.get(
-            'preserve_saved_networks', True)
+            "preserve_saved_networks", True
+        )
 
-        if not utils.is_valid_ipv4_address(
-                self.ip) and not utils.is_valid_ipv6_address(self.ip):
+        if not utils.is_valid_ipv4_address(self.ip) and not utils.is_valid_ipv6_address(
+            self.ip
+        ):
             mdns_ip = None
             for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
                 mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
@@ -266,19 +283,21 @@
                 self.mdns_name = self.mdns_name or self.ip
                 self.ip = mdns_ip
             else:
-                raise ValueError('Invalid IP: %s' % self.ip)
+                raise ValueError("Invalid IP: %s" % self.ip)
 
         self.log = acts_logger.create_tagged_trace_logger(
-            "FuchsiaDevice | %s" % self.orig_ip)
+            "FuchsiaDevice | %s" % self.orig_ip
+        )
 
-        self.ping_rtt_match = re.compile(r'RTT Min/Max/Avg '
-                                         r'= \[ (.*?) / (.*?) / (.*?) \] ms')
-        self.serial = re.sub('[.:%]', '_', self.ip)
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_path = os.path.join(log_path_base,
-                                     'FuchsiaDevice%s' % self.serial)
+        self.ping_rtt_match = re.compile(
+            r"RTT Min/Max/Avg " r"= \[ (.*?) / (.*?) / (.*?) \] ms"
+        )
+        self.serial = re.sub("[.:%]", "_", self.ip)
+        log_path_base = getattr(logging, "log_path", "/tmp/logs")
+        self.log_path = os.path.join(log_path_base, "FuchsiaDevice%s" % self.serial)
         self.fuchsia_log_file_path = os.path.join(
-            self.log_path, "fuchsialog_%s_debug.txt" % self.serial)
+            self.log_path, "fuchsialog_%s_debug.txt" % self.serial
+        )
         self.log_process = None
         self.package_server = None
 
@@ -291,38 +310,46 @@
         The sl4f module uses lazy-initialization; it will initialize an sl4f
         server on the host device when it is required.
         """
-        if not hasattr(self, '_sl4f'):
+        if not hasattr(self, "_sl4f"):
             self._sl4f = SL4F(self.ssh, self.sl4f_port)
-            self.log.info('Started SL4F server')
+            self.log.info("Started SL4F server")
         return self._sl4f
 
     @sl4f.deleter
     def sl4f(self):
-        if not hasattr(self, '_sl4f'):
+        if not hasattr(self, "_sl4f"):
             return
-        self.log.debug('Cleaning up SL4F')
+        self.log.debug("Cleaning up SL4F")
         del self._sl4f
 
     @property
     def ssh(self):
         """Get the SSH provider module configured for this device."""
-        if not hasattr(self, '_ssh'):
+        if not hasattr(self, "_ssh"):
             if not self.ssh_port:
                 raise FuchsiaConfigError(
-                    'Must provide "ssh_port: <int>" in the device config')
+                    'Must provide "ssh_port: <int>" in the device config'
+                )
             if not self.ssh_priv_key:
                 raise FuchsiaConfigError(
                     'Must provide "ssh_priv_key: <file path>" in the device config'
                 )
-            self._ssh = SSHProvider(
-                SSHConfig(self.ip, self.ssh_priv_key, port=self.ssh_port))
+            self._ssh = FuchsiaSSHProvider(
+                SSHConfig(
+                    self.ssh_username,
+                    self.ip,
+                    self.ssh_priv_key,
+                    port=self.ssh_port,
+                    ssh_binary=self.ssh_binary_path,
+                )
+            )
         return self._ssh
 
     @ssh.deleter
     def ssh(self):
-        if not hasattr(self, '_ssh'):
+        if not hasattr(self, "_ssh"):
             return
-        self.log.debug('Cleaning up SSH')
+        self.log.debug("Cleaning up SSH")
         del self._ssh
 
     @property
@@ -336,20 +363,21 @@
         access again. Note re-initialization will interrupt any running ffx
         calls.
         """
-        if not hasattr(self, '_ffx'):
+        if not hasattr(self, "_ffx"):
             if not self.mdns_name:
                 raise FuchsiaConfigError(
                     'Must provide "mdns_name: <device mDNS name>" in the device config'
                 )
-            self._ffx = FFX(self.ffx_binary_path, self.mdns_name, self.ip,
-                            self.ssh_priv_key)
+            self._ffx = FFX(
+                self.ffx_binary_path, self.mdns_name, self.ip, self.ssh_priv_key
+            )
         return self._ffx
 
     @ffx.deleter
     def ffx(self):
-        if not hasattr(self, '_ffx'):
+        if not hasattr(self, "_ffx"):
             return
-        self.log.debug('Cleaning up ffx')
+        self.log.debug("Cleaning up ffx")
         self._ffx.clean_up()
         del self._ffx
 
@@ -359,7 +387,8 @@
         Args:
             file_path: Path to write the generated SSH config
         """
-        content = textwrap.dedent(f"""\
+        content = textwrap.dedent(
+            f"""\
             Host *
                 CheckHostIP no
                 StrictHostKeyChecking no
@@ -376,9 +405,10 @@
                 ServerAliveInterval 1
                 ServerAliveCountMax 1
                 LogLevel ERROR
-            """)
+            """
+        )
 
-        with open(file_path, 'w') as file:
+        with open(file_path, "w") as file:
             file.write(content)
 
     def init_controllers(self):
@@ -389,7 +419,7 @@
         self.wlan_controller = WlanController(self)
 
         # Contains WLAN policy functions like save_network, remove_network, etc
-        self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ffx)
+        self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ssh)
 
     def start_package_server(self):
         if not self.packages_archive_path:
@@ -399,7 +429,8 @@
                 "the DUT. If this is not the case, either run your own package "
                 "server, or configure these fields appropriately. "
                 "This is usually required for the Fuchsia iPerf3 client or "
-                "other testing utilities not on device cache.")
+                "other testing utilities not on device cache."
+            )
             return
         if self.package_server:
             self.log.warn(
@@ -427,16 +458,18 @@
         """
         for cmd_dict in cmd_dicts:
             try:
-                cmd = cmd_dict['cmd']
+                cmd = cmd_dict["cmd"]
             except KeyError:
                 raise FuchsiaDeviceError(
                     'To run a command via config, you must provide key "cmd" '
-                    'containing the command string.')
+                    "containing the command string."
+                )
 
-            timeout = cmd_dict.get('timeout', FUCHSIA_DEFAULT_COMMAND_TIMEOUT)
+            timeout = cmd_dict.get("timeout", FUCHSIA_DEFAULT_COMMAND_TIMEOUT)
             # Catch both boolean and string values from JSON
-            skip_status_code_check = 'true' == str(
-                cmd_dict.get('skip_status_code_check', False)).lower()
+            skip_status_code_check = (
+                "true" == str(cmd_dict.get("skip_status_code_check", False)).lower()
+            )
 
             if skip_status_code_check:
                 self.log.info(f'Running command "{cmd}" and ignoring result.')
@@ -446,23 +479,23 @@
             try:
                 result = self.ssh.run(cmd, timeout_sec=timeout)
                 self.log.debug(result)
-            except FuchsiaSSHError as e:
+            except SSHError as e:
                 if not skip_status_code_check:
                     raise FuchsiaDeviceError(
-                        'Failed device specific commands for initial configuration'
+                        "Failed device specific commands for initial configuration"
                     ) from e
 
-    def configure_wlan(self,
-                       association_mechanism=None,
-                       preserve_saved_networks=None):
+    def configure_wlan(
+        self, association_mechanism: str = None, preserve_saved_networks: bool = None
+    ) -> None:
         """
         Readies device for WLAN functionality. If applicable, connects to the
         policy layer and clears/saves preexisting saved networks.
 
         Args:
-            association_mechanism: string, 'policy' or 'drivers'. If None, uses
+            association_mechanism: either 'policy' or 'drivers'. If None, uses
                 the default value from init (can be set by ACTS config)
-            preserve_saved_networks: bool, whether to clear existing saved
+            preserve_saved_networks: whether to clear existing saved
                 networks, and preserve them for restoration later. If None, uses
                 the default value from init (can be set by ACTS config)
 
@@ -483,28 +516,32 @@
 
         if association_mechanism not in VALID_ASSOCIATION_MECHANISMS:
             raise FuchsiaDeviceError(
-                'Invalid FuchsiaDevice association_mechanism: %s' %
-                association_mechanism)
+                "Invalid FuchsiaDevice association_mechanism: %s"
+                % association_mechanism
+            )
 
         # Allows for wlan to be set up differently in different tests
         if self.association_mechanism:
-            self.log.info('Deconfiguring WLAN')
+            self.log.info("Deconfiguring WLAN")
             self.deconfigure_wlan()
 
         self.association_mechanism = association_mechanism
 
-        self.log.info('Configuring WLAN w/ association mechanism: %s' %
-                      association_mechanism)
-        if association_mechanism == 'drivers':
+        self.log.info(
+            "Configuring WLAN w/ association mechanism: " f"{association_mechanism}"
+        )
+        if association_mechanism == "drivers":
             self.log.warn(
-                'You may encounter unusual device behavior when using the '
-                'drivers directly for WLAN. This should be reserved for '
-                'debugging specific issues. Normal test runs should use the '
-                'policy layer.')
+                "You may encounter unusual device behavior when using the "
+                "drivers directly for WLAN. This should be reserved for "
+                "debugging specific issues. Normal test runs should use the "
+                "policy layer."
+            )
             if preserve_saved_networks:
                 self.log.warn(
-                    'Unable to preserve saved networks when using drivers '
-                    'association mechanism (requires policy layer control).')
+                    "Unable to preserve saved networks when using drivers "
+                    "association mechanism (requires policy layer control)."
+                )
         else:
             # This requires SL4F calls, so it can only happen with actual
             # devices, not with unit tests.
@@ -524,22 +561,21 @@
             FuchsiaDeviceError, if deconfigure fails.
         """
         if not self.association_mechanism:
-            self.log.debug(
-                'WLAN not configured before deconfigure was called.')
+            self.log.debug("WLAN not configured before deconfigure was called.")
             return
         # If using policy, stop client connections. Otherwise, just clear
         # variables.
-        if self.association_mechanism != 'drivers':
+        if self.association_mechanism != "drivers":
             self.wlan_policy_controller._deconfigure_wlan()
         self.association_mechanism = None
 
-    def reboot(self,
-               use_ssh: bool = False,
-               unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               ping_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               ssh_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               reboot_type: int = FUCHSIA_REBOOT_TYPE_SOFT,
-               testbed_pdus: List[pdu.PduDevice] = None) -> None:
+    def reboot(
+        self,
+        use_ssh: bool = False,
+        unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
+        reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT,
+        testbed_pdus: List[pdu.PduDevice] = [],
+    ) -> None:
         """Reboot a FuchsiaDevice.
 
         Soft reboots the device, verifies it becomes unreachable, then verifies
@@ -549,8 +585,6 @@
             use_ssh: if True, use fuchsia shell command via ssh to reboot
                 instead of SL4F.
             unreachable_timeout: time to wait for device to become unreachable.
-            ping_timeout:time to wait for device to respond to pings.
-            ssh_timeout: time to wait for device to be reachable via ssh.
             reboot_type: 'soft', 'hard' or 'flash'.
             testbed_pdus: all testbed PDUs.
 
@@ -560,57 +594,43 @@
         """
         if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
             if use_ssh:
-                self.log.info('Soft rebooting via SSH')
+                self.log.info("Soft rebooting via SSH")
                 try:
                     self.ssh.run(
-                        'dm reboot',
-                        timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
-                except FuchsiaSSHError as e:
-                    if 'closed by remote host' not in e.result.stderr:
+                        "dm reboot", timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME
+                    )
+                except SSHError as e:
+                    if "closed by remote host" not in e.result.stderr:
                         raise e
             else:
-                self.log.info('Soft rebooting via SL4F')
-                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(
-                    timeout=3)
-            self._check_unreachable(timeout_sec=unreachable_timeout)
+                self.log.info("Soft rebooting via SL4F")
+                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(timeout=3)
+            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
 
         elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
-            self.log.info('Hard rebooting via PDU')
+            self.log.info("Hard rebooting via PDU")
             if not testbed_pdus:
-                raise AttributeError('Testbed PDUs must be supplied '
-                                     'to hard reboot a fuchsia_device.')
+                raise AttributeError(
+                    "Testbed PDUs must be supplied " "to hard reboot a fuchsia_device."
+                )
             device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
-                self.device_pdu_config, testbed_pdus)
-            self.log.info('Killing power to FuchsiaDevice')
+                self.device_pdu_config, testbed_pdus
+            )
+            self.log.info("Killing power to FuchsiaDevice")
             device_pdu.off(str(device_pdu_port))
-            self._check_unreachable(timeout_sec=unreachable_timeout)
-            self.log.info('Restoring power to FuchsiaDevice')
+            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
+            self.log.info("Restoring power to FuchsiaDevice")
             device_pdu.on(str(device_pdu_port))
 
         elif reboot_type == FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH:
             flash(self, use_ssh, FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
 
         else:
-            raise ValueError('Invalid reboot type: %s' % reboot_type)
-
-        self._check_reachable(timeout_sec=ping_timeout)
+            raise ValueError("Invalid reboot type: %s" % reboot_type)
 
         # Cleanup services
         self.stop_services()
 
-        self.log.info('Waiting for device to allow ssh connection.')
-        end_time = time.time() + ssh_timeout
-        while time.time() < end_time:
-            try:
-                self.ssh.run('echo')
-            except Exception as e:
-                self.log.debug(f'Retrying SSH to device. Details: {e}')
-            else:
-                break
-        else:
-            raise ConnectionError('Failed to connect to device via SSH.')
-        self.log.info('Device now available via ssh.')
-
         # TODO (b/246852449): Move configure_wlan to other controllers.
         # If wlan was configured before reboot, it must be configured again
         # after rebooting, as it was before reboot. No preserving should occur.
@@ -620,49 +640,32 @@
             self.association_mechanism = None
             self.configure_wlan(
                 association_mechanism=pre_reboot_association_mechanism,
-                preserve_saved_networks=False)
-
-        self.log.info('Device has rebooted')
-
-    def version(self):
-        """Returns the version of Fuchsia running on the device.
-
-        Returns:
-            A string containing the Fuchsia version number or nothing if there
-            is no version information attached during the build.
-            For example, "5.20210713.2.1" or "".
-
-        Raises:
-            FFXTimeout: when the command times out.
-            FFXError: when the command returns non-zero and skip_status_code_check is False.
-        """
-        target_info_json = self.ffx.run("target show --json").stdout
-        target_info = json.loads(target_info_json)
-        build_info = [
-            entry for entry in target_info if entry["label"] == "build"
-        ]
-        if len(build_info) != 1:
-            self.log.warning(
-                f'Expected one entry with label "build", found {build_info}')
-            return ""
-        version_info = [
-            child for child in build_info[0]["child"]
-            if child["label"] == "version"
-        ]
-        if len(version_info) != 1:
-            self.log.warning(
-                f'Expected one entry child with label "version", found {build_info}'
+                preserve_saved_networks=False,
             )
-            return ""
-        return version_info[0]["value"]
 
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
+        self.log.info("Device has rebooted")
+
+    def version(self) -> str:
+        """Return the version of Fuchsia running on the device."""
+        return self.sl4f.device_lib.get_version()["result"]
+
+    def device_name(self) -> str:
+        """Return the name of the device."""
+        return self.sl4f.device_lib.get_device_name()["result"]
+
+    def product_name(self) -> str:
+        """Return the product name of the device."""
+        return self.sl4f.device_lib.get_product_name()["result"]
+
+    def ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
         """Pings from a Fuchsia device to an IPv4 address or hostname
 
         Args:
@@ -690,49 +693,54 @@
         rtt_avg = None
         self.log.debug("Pinging %s..." % dest_ip)
         if not additional_ping_params:
-            additional_ping_params = ''
+            additional_ping_params = ""
 
         try:
             ping_result = self.ssh.run(
-                f'ping -c {count} -i {interval} -t {timeout} -s {size} '
-                f'{additional_ping_params} {dest_ip}')
-        except FuchsiaSSHError as e:
+                f"ping -c {count} -i {interval} -t {timeout} -s {size} "
+                f"{additional_ping_params} {dest_ip}"
+            )
+        except SSHError as e:
             ping_result = e.result
 
         if ping_result.stderr:
             status = False
         else:
             status = True
-            rtt_line = ping_result.stdout.split('\n')[:-1]
+            rtt_line = ping_result.stdout.split("\n")[:-1]
             rtt_line = rtt_line[-1]
             rtt_stats = re.search(self.ping_rtt_match, rtt_line)
             rtt_min = rtt_stats.group(1)
             rtt_max = rtt_stats.group(2)
             rtt_avg = rtt_stats.group(3)
         return {
-            'status': status,
-            'rtt_min': rtt_min,
-            'rtt_max': rtt_max,
-            'rtt_avg': rtt_avg,
-            'stdout': ping_result.stdout,
-            'stderr': ping_result.stderr
+            "status": status,
+            "rtt_min": rtt_min,
+            "rtt_max": rtt_max,
+            "rtt_avg": rtt_avg,
+            "stdout": ping_result.stdout,
+            "stderr": ping_result.stderr,
         }
 
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip,
+        count=1,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ) -> bool:
         """Returns whether fuchsia device can ping a given dest address"""
-        ping_result = self.ping(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                additional_ping_params=additional_ping_params)
-        return ping_result['status']
+        ping_result = self.ping(
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
+        return ping_result["status"]
 
     def clean_up(self):
         """Cleans up the FuchsiaDevice object, releases any resources it
@@ -744,12 +752,11 @@
         thread will never join and will leave tests hanging.
         """
         # If and only if wlan is configured, and using the policy layer
-        if self.association_mechanism == 'policy':
+        if self.association_mechanism == "policy":
             try:
                 self.wlan_policy_controller.clean_up()
             except Exception as err:
-                self.log.warning('Unable to clean up WLAN Policy layer: %s' %
-                                 err)
+                self.log.warning("Unable to clean up WLAN Policy layer: %s" % err)
 
         self.stop_services()
 
@@ -771,23 +778,25 @@
             timeout.
         """
         self.log.info(
-            f'Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds.'
+            f"Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
         )
         timeout = time.time() + IP_ADDRESS_TIMEOUT
         while time.time() < timeout:
             ip_addrs = self.get_interface_ip_addresses(interface)
 
-            if len(ip_addrs['ipv4_private']) > 0:
-                self.log.info("Device has an ipv4 address: "
-                              f"{ip_addrs['ipv4_private'][0]}")
+            if len(ip_addrs["ipv4_private"]) > 0:
+                self.log.info(
+                    "Device has an ipv4 address: " f"{ip_addrs['ipv4_private'][0]}"
+                )
                 break
             else:
                 self.log.debug(
-                    'Device does not yet have an ipv4 address...retrying in 1 '
-                    'second.')
+                    "Device does not yet have an ipv4 address...retrying in 1 "
+                    "second."
+                )
                 time.sleep(1)
         else:
-            raise ConnectionError('Device failed to get an ipv4 address.')
+            raise ConnectionError("Device failed to get an ipv4 address.")
 
     def wait_for_ipv6_addr(self, interface: str) -> None:
         """Checks if device has an ipv6 private local address. Sleeps 1 second
@@ -801,58 +810,25 @@
             timeout.
         """
         self.log.info(
-            f'Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds.'
+            f"Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
         )
         timeout = time.time() + IP_ADDRESS_TIMEOUT
         while time.time() < timeout:
             ip_addrs = self.get_interface_ip_addresses(interface)
-            if len(ip_addrs['ipv6_private_local']) > 0:
-                self.log.info("Device has an ipv6 private local address: "
-                              f"{ip_addrs['ipv6_private_local'][0]}")
+            if len(ip_addrs["ipv6_private_local"]) > 0:
+                self.log.info(
+                    "Device has an ipv6 private local address: "
+                    f"{ip_addrs['ipv6_private_local'][0]}"
+                )
                 break
             else:
                 self.log.debug(
-                    'Device does not yet have an ipv6 address...retrying in 1 '
-                    'second.')
+                    "Device does not yet have an ipv6 address...retrying in 1 "
+                    "second."
+                )
                 time.sleep(1)
         else:
-            raise ConnectionError('Device failed to get an ipv6 address.')
-
-    def _check_reachable(self,
-                         timeout_sec: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT
-                         ) -> None:
-        """Checks the reachability of the Fuchsia device."""
-        end_time = time.time() + timeout_sec
-        self.log.info('Verifying device is reachable.')
-        while time.time() < end_time:
-            # TODO (b/249343632): Consolidate ping commands and fix timeout in
-            # utils.can_ping.
-            if utils.can_ping(job, self.ip):
-                self.log.info('Device is reachable.')
-                break
-            else:
-                self.log.debug(
-                    'Device is not reachable. Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('Device is unreachable.')
-
-    def _check_unreachable(self,
-                           timeout_sec: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT
-                           ) -> None:
-        """Checks the Fuchsia device becomes unreachable."""
-        end_time = time.time() + timeout_sec
-        self.log.info('Verifying device is unreachable.')
-        while (time.time() < end_time):
-            if utils.can_ping(job, self.ip):
-                self.log.debug(
-                    'Device is still reachable. Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('Device is not reachable.')
-                break
-        else:
-            raise ConnectionError('Device failed to become unreachable.')
+            raise ConnectionError("Device failed to get an ipv6 address.")
 
     def check_connect_response(self, connect_response):
         if connect_response.get("error") is None:
@@ -869,8 +845,10 @@
                 return True
         else:
             # the response indicates an error - log and raise failure
-            self.log.debug("Aborting! - Connect call failed with error: %s" %
-                           connect_response.get("error"))
+            self.log.debug(
+                "Aborting! - Connect call failed with error: %s"
+                % connect_response.get("error")
+            )
             return False
 
     def check_disconnect_response(self, disconnect_response):
@@ -879,8 +857,10 @@
             return True
         else:
             # the response indicates an error - log and raise failure
-            self.log.debug("Disconnect call failed with error: %s" %
-                           disconnect_response.get("error"))
+            self.log.debug(
+                "Disconnect call failed with error: %s"
+                % disconnect_response.get("error")
+            )
             return False
 
     # TODO(fxb/64657): Determine more stable solution to country code config on
@@ -894,41 +874,44 @@
         # Country code can be None, from antlion config.
         if desired_country_code:
             desired_country_code = desired_country_code.upper()
-            response = self.sl4f.regulatory_region_lib.setRegion(
-                desired_country_code)
-            if response.get('error'):
+            response = self.sl4f.regulatory_region_lib.setRegion(desired_country_code)
+            if response.get("error"):
                 raise FuchsiaDeviceError(
-                    'Failed to set regulatory domain. Err: %s' %
-                    response['error'])
+                    "Failed to set regulatory domain. Err: %s" % response["error"]
+                )
 
             phy_list_response = self.sl4f.wlan_lib.wlanPhyIdList()
-            if phy_list_response.get('error'):
+            if phy_list_response.get("error"):
                 raise FuchsiaDeviceError(
-                    f'Failed to get phy list. Err: {response["error"]}')
-            phy_list = phy_list_response.get('result')
+                    f'Failed to get phy list. Err: {phy_list_response["error"]}'
+                )
+            phy_list = phy_list_response.get("result")
             if not phy_list:
-                raise FuchsiaDeviceError('No phy available in phy list')
+                raise FuchsiaDeviceError("No phy available in phy list")
             phy_id = phy_list[0]
 
             end_time = time.time() + FUCHSIA_COUNTRY_CODE_TIMEOUT
             while time.time() < end_time:
-                ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get(
-                    'result')
+                ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get("result")
                 # Convert ascii_cc to string, then compare
-                if ascii_cc and (''.join(chr(c) for c in ascii_cc).upper()
-                                 == desired_country_code):
-                    self.log.debug('Country code successfully set to %s.' %
-                                   desired_country_code)
+                if ascii_cc and (
+                    "".join(chr(c) for c in ascii_cc).upper() == desired_country_code
+                ):
+                    self.log.debug(
+                        "Country code successfully set to %s." % desired_country_code
+                    )
                     return
-                self.log.debug('Country code not yet updated. Retrying.')
+                self.log.debug("Country code not yet updated. Retrying.")
                 time.sleep(1)
-            raise FuchsiaDeviceError('Country code never updated to %s' %
-                                     desired_country_code)
+            raise FuchsiaDeviceError(
+                "Country code never updated to %s" % desired_country_code
+            )
 
     def stop_services(self):
-        """Stops the ffx daemon and deletes SL4F property."""
-        self.log.info('Stopping host device services.')
+        """Stops ffx daemon, deletes SSH property, and deletes SL4F property."""
+        self.log.info("Stopping host device services.")
         del self.sl4f
+        del self.ssh
         del self.ffx
 
     def load_config(self, config):
@@ -947,24 +930,25 @@
                 specified, the current time will be used.
         """
         if test_name:
-            self.log.info(
-                f"Taking snapshot of {self.mdns_name} for {test_name}")
+            self.log.info(f"Taking snapshot of {self.mdns_name} for {test_name}")
         else:
             self.log.info(f"Taking snapshot of {self.mdns_name}")
 
         epoch = begin_time if begin_time else utils.get_current_epoch_time()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
         out_dir = context.get_current_context().get_full_output_path()
-        out_path = os.path.join(out_dir, f'{self.mdns_name}_{time_stamp}.zip')
+        out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip")
 
         try:
             subprocess.run(
                 [f"ssh -F {self.ssh_config} {self.ip} snapshot > {out_path}"],
-                shell=True)
-            self.log.info(f'Snapshot saved to {out_path}')
+                shell=True,
+            )
+            self.log.info(f"Snapshot saved to {out_path}")
         except Exception as err:
-            self.log.error(f'Failed to take snapshot: {err}')
+            self.log.error(f"Failed to take snapshot: {err}")
 
     def take_bt_snoop_log(self, custom_name=None):
         """Takes a the bt-snoop log from the device and stores it in a file
@@ -972,16 +956,19 @@
         """
         bt_snoop_path = context.get_current_context().get_full_output_path()
         time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(time.time()))
+            acts_logger.epoch_to_log_line_timestamp(time.time())
+        )
         out_name = "FuchsiaDevice%s_%s" % (
-            self.serial, time_stamp.replace(" ", "_").replace(":", "-"))
+            self.serial,
+            time_stamp.replace(" ", "_").replace(":", "-"),
+        )
         out_name = "%s.pcap" % out_name
         if custom_name:
             out_name = "%s_%s.pcap" % (self.serial, custom_name)
         else:
             out_name = "%s.pcap" % out_name
         full_out_path = os.path.join(bt_snoop_path, out_name)
-        bt_snoop_data = self.ssh.run('bt-snoop-cli -d -f pcap').raw_stdout
-        bt_snoop_file = open(full_out_path, 'wb')
+        bt_snoop_data = self.ssh.run("bt-snoop-cli -d -f pcap").raw_stdout
+        bt_snoop_file = open(full_out_path, "wb")
         bt_snoop_file.write(bt_snoop_data)
         bt_snoop_file.close()
diff --git a/src/antlion/controllers/fuchsia_lib/audio_lib.py b/src/antlion/controllers/fuchsia_lib/audio_lib.py
deleted file mode 100644
index 02d974d..0000000
--- a/src/antlion/controllers/fuchsia_lib/audio_lib.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-import base64
-
-
-class FuchsiaAudioLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "audio")
-
-    def startOutputSave(self):
-        """Starts saving audio output on the device
-
-        Returns:
-            Dictionary is success, error if error.
-        """
-        test_cmd = "audio_facade.StartOutputSave"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def stopOutputSave(self):
-        """Stops saving audio output on the device
-
-        Returns:
-            Dictionary is success, error if error.
-        """
-        test_cmd = "audio_facade.StopOutputSave"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getOutputAudio(self, save_path):
-        """Gets the saved audio in base64 encoding. Use base64.b64decode.
-
-        Args:
-            save_path: The path to save the raw audio
-
-        Returns:
-            True if success, False if error.
-        """
-        test_cmd = "audio_facade.GetOutputAudio"
-        test_args = {}
-
-        result = self.send_command(test_cmd, test_args)
-        if result.get("error") is not None:
-            self.log.error("Failed to get recorded audio.")
-            return False
-
-        f = open(save_path, "wb")
-        f.write(base64.b64decode(result.get('result')))
-        f.close()
-        self.log.info("Raw audio file captured at {}".format(save_path))
-        return True
diff --git a/src/antlion/controllers/fuchsia_lib/base_lib.py b/src/antlion/controllers/fuchsia_lib/base_lib.py
index 42da2ea..ea7f96e 100644
--- a/src/antlion/controllers/fuchsia_lib/base_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/base_lib.py
@@ -18,11 +18,9 @@
 import socket
 
 from typing import Any, Mapping
-from urllib.parse import urlparse
 from urllib.request import Request, urlopen
 
-from antlion import logger, utils
-from antlion.libs.proc import job
+from antlion import logger
 
 DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30
 
@@ -35,17 +33,18 @@
     """A SL4F command to the server failed."""
 
 
-class BaseLib():
-
+class BaseLib:
     def __init__(self, addr: str, logger_tag: str) -> None:
         self.address = addr
-        self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address} | {logger_tag}")
+        self.log = logger.create_tagged_trace_logger(
+            f"SL4F | {self.address} | {logger_tag}"
+        )
 
     def send_command(
         self,
         cmd: str,
         args: Mapping[str, Any],
-        response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC
+        response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC,
     ) -> Mapping[str, Any]:
         """Builds and sends a JSON command to SL4F server.
 
@@ -67,30 +66,26 @@
             # currently used.
             "id": "",
             "method": cmd,
-            "params": args
+            "params": args,
         }
         data_json = json.dumps(data).encode("utf-8")
-        req = Request(self.address,
-                      data=data_json,
-                      headers={
-                          "Content-Type": "application/json; charset=utf-8",
-                          "Content-Length": len(data_json),
-                      })
+        req = Request(
+            self.address,
+            data=data_json,
+            headers={
+                "Content-Type": "application/json; charset=utf-8",
+                "Content-Length": len(data_json),
+            },
+        )
 
         self.log.debug(f'Sending request "{cmd}" with {args}')
         try:
             response = urlopen(req, timeout=response_timeout)
-        except (TimeoutError, socket.timeout) as e:
-            host = urlparse(self.address).hostname
-            if not utils.can_ping(job, host):
-                raise DeviceOffline(
-                    f'FuchsiaDevice {host} is not reachable via the network.')
-            if type(e) == socket.timeout:
-                # socket.timeout was aliased to TimeoutError in Python 3.10. For
-                # older versions of Python, we need to cast to TimeoutError to
-                # provide a version-agnostic API.
-                raise TimeoutError("socket timeout") from e
-            raise e
+        except socket.timeout as e:
+            # socket.timeout was aliased to TimeoutError in Python 3.10. For
+            # older versions of Python, we need to cast to TimeoutError to
+            # provide a version-agnostic API.
+            raise TimeoutError("socket timeout") from e
 
         response_body = response.read().decode("utf-8")
         try:
diff --git a/src/antlion/controllers/fuchsia_lib/basemgr_lib.py b/src/antlion/controllers/fuchsia_lib/basemgr_lib.py
deleted file mode 100644
index 9e50e1e..0000000
--- a/src/antlion/controllers/fuchsia_lib/basemgr_lib.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_RESTART_SESSION = 'basemgr_facade.RestartSession'
-COMMAND_START_BASEMGR = 'basemgr_facade.StartBasemgr'
-COMMAND_KILL_BASEMGR = 'basemgr_facade.KillBasemgr'
-
-
-class FuchsiaBasemgrLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "basemgr")
-
-    def restartSession(self):
-        """Restarts an ongoing basemgr session
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success', 'NoSessionToRestart', or None if error
-        """
-        test_cmd = COMMAND_RESTART_SESSION
-
-        return self.send_command(test_cmd, {})
-
-    def startBasemgr(self):
-        """Starts basemgr service
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success' or None if error
-        """
-        test_cmd = COMMAND_START_BASEMGR
-
-        return self.send_command(test_cmd, {})
-
-    def killBasemgr(self):
-        """Kill basemgr service, if one is running
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success', 'NoBasemgrToKill', or None if error
-        """
-        test_cmd = COMMAND_KILL_BASEMGR
-
-        return self.send_command(test_cmd, {})
diff --git a/src/antlion/controllers/fuchsia_lib/bt/__init__.py b/src/antlion/controllers/fuchsia_lib/bt/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py
deleted file mode 100644
index 0af61d2..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py
+++ /dev/null
@@ -1,229 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaAvdtpLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "avdtp")
-
-    def init(self, initiator_delay=None):
-        """Initializes the AVDTP service with optional initiator_delay.
-
-        Args:
-            initiator_delay: Optional. The delay in milliseconds to start a
-            stream.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpInit"
-        test_args = {"initiator_delay": initiator_delay}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getConnectedPeers(self):
-        """Gets the AVDTP connected peers.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetConnectedPeers"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setConfiguration(self, peer_id):
-        """Sends the AVDTP command to input peer_id: set configuration
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSetConfiguration"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getConfiguration(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get configuration
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetConfiguration"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getCapabilities(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get capabilities
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetCapabilities"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getAllCapabilities(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get all capabilities
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetAllCapabilities"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def reconfigureStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: reconfigure stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpReconfigureStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def suspendStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: suspend stream
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSuspendStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def suspendAndReconfigure(self, peer_id):
-        """Sends the AVDTP command to input peer_id: suspend and reconfigure
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSuspendAndReconfigure"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def releaseStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: release stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpReleaseStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def establishStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: establish stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpEstablishStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def startStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: start stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpStartStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def abortStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: abort stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpAbortStream"
-        test_args = {"identifier": peer_id}
-        test_id = self.build_id(self.test_counter)
-        self.test_counter += 1
-
-        return self.send_command(test_cmd, test_args)
-
-    def establishStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: establish stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpEstablishStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the AVDTP service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py b/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py
deleted file mode 100644
index 1d7e622..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-import uuid
-
-
-class FuchsiaBleLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "ble")
-
-    def _convert_human_readable_uuid_to_byte_list(self, readable_uuid):
-        """Converts a readable uuid to a byte list.
-
-        Args:
-            readable_uuid: string, A readable uuid in the format:
-                Input: "00001101-0000-1000-8000-00805f9b34fb"
-                Output: ['fb', '34', '9b', '5f', '80', '00', '00', '80', '00',
-                         '10', '00', '00', '01', '11', '00', '00']
-
-        Returns:
-            A byte list representing the readable uuid.
-        """
-        hex_uuid_str = uuid.UUID(readable_uuid).hex
-        break_n_bytes = 2
-        byte_list = [
-            hex_uuid_str[i:i + break_n_bytes]
-            for i in range(0, len(hex_uuid_str), break_n_bytes)
-        ]
-        byte_list.reverse()
-        return byte_list
-
-    def bleStopBleAdvertising(self):
-        """BleStopAdvertising command
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "ble_advertise_facade.BleStopAdvertise"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleStartBleAdvertising(self,
-                               advertising_data,
-                               scan_response,
-                               interval,
-                               connectable=True):
-        """BleStartAdvertising command
-
-        Args:
-            advertising_data: dictionary, advertising data required for ble
-                advertise.
-            scan_response: dictionary, optional scan respones data to send.
-            interval: int, Advertising interval (in ms).
-            connectable: bool, whether the advertisement is connectable or not.
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "ble_advertise_facade.BleAdvertise"
-        service_uuid_list = None
-        if type(advertising_data['service_uuids']) == list:
-            service_uuid_list = []
-            for single_uuid in advertising_data['service_uuids']:
-                service_uuid_list.append(
-                    self._convert_human_readable_uuid_to_byte_list(
-                        single_uuid))
-            advertising_data['service_uuids'] = service_uuid_list
-
-        service_uuid_list = None
-        if scan_response and type(scan_response['service_uuids']) == list:
-            service_uuid_list = []
-            for single_uuid in scan_response['service_uuids']:
-                service_uuid_list.append(
-                    self._convert_human_readable_uuid_to_byte_list(
-                        single_uuid))
-            scan_response['service_uuids'] = service_uuid_list
-
-        if scan_response and type(scan_response['service_data']) == list:
-            for service_data in scan_response['service_data']:
-                service_data[
-                    "uuid"] = self._convert_human_readable_uuid_to_byte_list(
-                        service_data["uuid"])
-
-        if type(advertising_data['service_data']) == list:
-            for service_data in advertising_data['service_data']:
-                service_data[
-                    "uuid"] = self._convert_human_readable_uuid_to_byte_list(
-                        service_data["uuid"])
-
-        test_args = {
-            "advertising_data": advertising_data,
-            "scan_response": scan_response,
-            "interval_ms": interval,
-            "connectable": connectable
-        }
-        return self.send_command(test_cmd, test_args)
-
-    def blePublishService(self, primary, type_, service_id):
-        """Publishes services specified by input args
-
-        Args:
-            primary: bool, Flag of service.
-            type: string, Canonical 8-4-4-4-12 uuid of service.
-            service_proxy_key: string, Unique identifier to specify where to publish service
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bluetooth.BlePublishService"
-        test_args = {
-            "primary": primary,
-            "type": type_,
-            "local_service_id": service_id
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py b/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py
deleted file mode 100644
index 6a94c6b..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaBtsLib(BaseLib):
-    # Class representing the Bluetooth Access Library.
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "bt_sys")
-
-    def setDiscoverable(self, discoverable):
-        """Sets the device to be discoverable over BR/EDR.
-
-        Args:
-            discoverable: A bool object for setting Bluetooth
-              device discoverable or not.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothSetDiscoverable"
-        test_args = {"discoverable": discoverable}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setName(self, name):
-        """Sets the local Bluetooth name of the device.
-
-        Args:
-            name: A string that represents the name to set.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothSetName"
-        test_args = {"name": name}
-
-        return self.send_command(test_cmd, test_args)
-
-    def inputPairingPin(self, pin):
-        """Inputs the pairing pin to the Fuchsia devices' pairing delegate.
-
-        Args:
-            pin: A string that represents the pin to input.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothInputPairingPin"
-        test_args = {"pin": pin}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getPairingPin(self):
-        """Gets the pairing pin from the Fuchsia devices' pairing delegate.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetPairingPin"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initBluetoothSys(self):
-        """Initialises the Bluetooth sys Interface proxy in SL4F.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothInitSys"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def requestDiscovery(self, discovery):
-        """Start or stop Bluetooth Control device discovery.
-
-        Args:
-            discovery: A bool object representing starting or stopping
-              device discovery.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothRequestDiscovery"
-        test_args = {"discovery": discovery}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getKnownRemoteDevices(self):
-        """Get known remote BR/EDR and LE devices.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetKnownRemoteDevices"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def forgetDevice(self, identifier):
-        """Forgets a devices pairing.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothForgetDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectDevice(self, identifier):
-        """Disconnects a devices.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothDisconnectDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectDevice(self, identifier):
-        """Connects to a devices.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothConnectDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getActiveAdapterAddress(self):
-        """Gets the current Active Adapter's address.
-
-        Returns:
-            Dictionary, String address if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetActiveAdapterAddress"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def pair(self, identifier, pairing_security_level, non_bondable,
-             transport):
-        """Pairs to a device.
-
-        Args:
-            identifier: A string representing the device id.
-            pairing_security_level: The security level required for this pairing request
-                represented as a u64. (Only for LE pairing)
-                Available Values
-                1 - ENCRYPTED: Encrypted without MITM protection (unauthenticated)
-                2 - AUTHENTICATED: Encrypted with MITM protection (authenticated).
-                None: No pairing security level.
-            non_bondable: A bool representing whether the pairing mode is bondable or not. None is
-                also accepted. False if bondable, True if non-bondable.
-            transport: A u64 representing the transport type.
-                Available Values
-                1 - BREDR: Classic BR/EDR transport
-                2 - LE: Bluetooth Low Energy Transport
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothPairDevice"
-        test_args = {
-            "identifier": identifier,
-            "pairing_security_level": pairing_security_level,
-            "non_bondable": non_bondable,
-            "transport": transport,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def acceptPairing(self,
-                      input_capabilities="NONE",
-                      output_capabilities="NONE"):
-        """Accepts incoming pairing requests.
-
-        Args:
-            input: String - The input I/O capabilities to use
-                Available Values:
-                NONE - Input capability type None
-                CONFIRMATION - Input capability type confirmation
-                KEYBOARD - Input capability type Keyboard
-            output: String - The output I/O Capabilities to use
-                Available Values:
-                NONE - Output capability type None
-                DISPLAY - output capability type Display
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothAcceptPairing"
-        test_args = {
-            "input": input_capabilities,
-            "output": output_capabilities,
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py b/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py
deleted file mode 100644
index b8630d9..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py
+++ /dev/null
@@ -1,350 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaGattcLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "gatt_client")
-
-    def bleStartBleScan(self, scan_filter):
-        """Starts a BLE scan
-
-        Args:
-            scan_time_ms: int, Amount of time to scan for.
-            scan_filter: dictionary, Device filter for a scan.
-            scan_count: int, Number of devices to scan for before termination.
-
-        Returns:
-            None if pass, err if fail.
-        """
-        test_cmd = "gatt_client_facade.BleStartScan"
-        test_args = {
-            "filter": scan_filter,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleStopBleScan(self):
-        """Stops a BLE scan
-
-        Returns:
-            Dictionary, List of devices discovered, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleStopScan"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listServices(self, id):
-        """Lists services of a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to list services.
-
-        Returns:
-            Dictionary, List of Service Info if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcListServices"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleGetDiscoveredDevices(self):
-        """Stops a BLE scan
-
-        Returns:
-            Dictionary, List of devices discovered, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleGetDiscoveredDevices"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def discoverCharacteristics(self):
-        """Discover the characteristics of a connected service.
-
-        Returns:
-            Dictionary, List of Characteristics and Descriptors if success,
-            error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcDiscoverCharacteristics"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeCharById(self, id, offset, write_value):
-        """Write Characteristic by id..
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeLongCharById(self, id, offset, write_value, reliable_mode=False):
-        """Write Characteristic by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-            reliable_mode: bool value representing reliable writes.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteLongCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-            "reliable_mode": reliable_mode
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeLongDescById(self, id, offset, write_value):
-        """Write Descriptor by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteLongDescriptorById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeCharByIdWithoutResponse(self, id, write_value):
-        """Write Characteristic by id without response.
-
-        Args:
-            id: string, Characteristic identifier.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteCharacteristicByIdWithoutResponse"
-        test_args = {
-            "identifier": id,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def enableNotifyCharacteristic(self, id):
-        """Enable notifications on a Characteristic.
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcEnableNotifyCharacteristic"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def disableNotifyCharacteristic(self, id):
-        """Disable notifications on a Characteristic.
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcDisableNotifyCharacteristic"
-        test_args = {
-            "identifier": id,
-            "value": False,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readCharacteristicById(self, id):
-        """Read Characteristic value by id..
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadCharacteristicById"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readCharacteristicByType(self, uuid):
-        """Read Characteristic value by id..
-
-        Args:
-            uuid: string, Characteristic identifier.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadCharacteristicByType"
-        test_args = {
-            "uuid": uuid,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readDescriptorById(self, id):
-        """Read Descriptor value by id..
-
-        Args:
-            id: string, Descriptor identifier.
-
-        Returns:
-            Descriptor value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadDescriptorById"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readLongDescriptorById(self, id, offset, max_bytes):
-        """Reads Long Descriptor value by id.
-
-        Args:
-            id: string, Descriptor identifier.
-            offset: int, The offset to start reading from.
-            max_bytes: int, The max bytes to return.
-
-        Returns:
-            Descriptor value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadLongDescriptorById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "max_bytes": max_bytes
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeDescriptorById(self, id, offset, write_value):
-        """Write Descriptor by id.
-
-        Args:
-            id: string, Descriptor identifier.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteDescriptorById"
-        test_args = {
-            "identifier": id,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readLongCharacteristicById(self, id, offset, max_bytes):
-        """Reads Long Characteristic value by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset to start reading from.
-            max_bytes: int, The max bytes to return.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadLongCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "max_bytes": max_bytes
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectToService(self, id, service_id):
-        """ Connect to a specific Service specified by id.
-
-        Args:
-            id: string, Service id.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcConnectToService"
-        test_args = {"identifier": id, "service_identifier": service_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleConnectToPeripheral(self, id):
-        """Connects to a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to connect to.
-
-        Returns:
-            Dictionary, List of Service Info if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleConnectPeripheral"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleDisconnectPeripheral(self, id):
-        """Disconnects from a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to disconnect from.
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleDisconnectPeripheral"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py b/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py
deleted file mode 100644
index 5f9ecb4..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaGattsLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "gatt_server")
-
-    def publishServer(self, database):
-        """Publishes services specified by input args
-
-        Args:
-            database: A database that follows the conventions of
-                acts_contrib.test_utils.bt.gatt_test_database.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "gatt_server_facade.GattServerPublishServer"
-        test_args = {
-            "database": database,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def closeServer(self):
-        """Closes an active GATT server.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "gatt_server_facade.GattServerCloseServer"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py
deleted file mode 100644
index e8f68f3..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py
+++ /dev/null
@@ -1,420 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaHfpLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "hfp")
-
-    def init(self):
-        """Initializes the HFP service.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.HfpInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the HFP service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.HfpRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listPeers(self):
-        """List all connected HFP peer devices.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ListPeers"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setActivePeer(self, peer_id):
-        """Set the active HFP peer device. All peer specific commands will be
-        directed to this device.
-
-        Args:
-            peer_id: The id of the peer to set as active. Use "listPeers" to
-            find connected peer ids.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetActivePeer"
-        test_args = {"peer_id": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listCalls(self):
-        """List all calls known to the sl4f component.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ListCalls"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def newCall(self, remote, state, direction):
-        """Opens a new call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-            state: The state of the call.
-            direction: The direction of the call. Can be "incoming" or "outgoing".
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.NewCall"
-        test_args = {"remote": remote, "state": state, "direction": direction}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateIncomingCall(self, remote):
-        """Opens an incoming call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.IncomingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateIncomingWaitingCall(self, remote):
-        """Opens an incoming call when there is an onging call and alerts
-        the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.IncomingWaitingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateOutgoingCall(self, remote):
-        """Opens an outgoing call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.OutgoingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallActive(self, call_id):
-        """Sets the specified call to the "OngoingActive" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallActive"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallHeld(self, call_id):
-        """Sets the specified call to the "OngoingHeld" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallHeld"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallTerminated(self, call_id):
-        """Sets the specified call to the "Terminated" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallTerminated"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallTransferredToAg(self, call_id):
-        """Sets the specified call to the "TransferredToAg" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallTransferredToAg"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSpeakerGain(self, value):
-        """Sets the active peer's speaker gain.
-
-        Args:
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSpeakerGain"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setMicrophoneGain(self, value):
-        """Sets the active peer's microphone gain.
-
-        Args:
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetMicrophoneGain"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setServiceAvailable(self, value):
-        """Sets the simulated network service status reported by the call manager.
-
-        Args:
-            value: True to set the network service to available.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetServiceAvailable"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setRoaming(self, value):
-        """Sets the simulated roaming status reported by the call manager.
-
-        Args:
-            value: True to set the network connection to roaming.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetRoaming"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSignalStrength(self, value):
-        """Sets the simulated signal strength reported by the call manager.
-
-        Args:
-            value: The signal strength value to set. Must be between 0-5 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSignalStrength"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSubscriberNumber(self, value):
-        """Sets the subscriber number reported by the call manager.
-
-        Args:
-            value: The subscriber number to set. Maximum length 128 characters.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSubscriberNumber"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setOperator(self, value):
-        """Sets the operator value reported by the call manager.
-
-        Args:
-            value: The operator value to set. Maximum length 16 characters.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetOperator"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setNrecSupport(self, value):
-        """Sets the noise reduction/echo cancelation support reported by the call manager.
-
-        Args:
-            value: The nrec support bool.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetNrecSupport"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setBatteryLevel(self, value):
-        """Sets the battery level reported by the call manager.
-
-        Args:
-            value: The integer battery level value. Must be 0-5 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetBatteryLevel"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setLastDialed(self, number):
-        """Sets the last dialed number in the call manager.
-
-        Args:
-            number: The number of the remote party.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetLastDialed"
-        test_args = {"number": number}
-
-        return self.send_command(test_cmd, test_args)
-
-    def clearLastDialed(self):
-        """Clears the last dialed number in the call manager.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ClearLastDialed"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setMemoryLocation(self, location, number):
-        """Sets a memory location to point to a remote number.
-
-        Args:
-            location: The memory location at which to store the number.
-            number: The number of the remote party to be stored.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetMemoryLocation"
-        test_args = {"location": location, "number": number}
-
-        return self.send_command(test_cmd, test_args)
-
-    def clearMemoryLocation(self, location):
-        """Clear a memory location so that it no longer points to a remote
-        number.
-
-        Args:
-            location: The memory location to clear.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ClearMemoryLocation"
-        test_args = {"location": location}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setDialResult(self, number, status):
-        """Sets the status result to be returned when the number is dialed.
-
-        Args:
-            number: The number of the remote party.
-            status: The status to be returned when an outgoing call is
-                    initiated to the number.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetDialResult"
-        test_args = {"number": number, "status": status}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getState(self):
-        """Get the call manager's state.
-
-        Returns:
-            Dictionary, State dictionary if success, error if error.
-        """
-        test_cmd = "hfp_facade.GetState"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setConnectionBehavior(self, autoconnect):
-        """Set the Service Level Connection behavior when a new peer connects.
-
-        Args:
-            autoconnect: Enable/Disable autoconnection of SLC.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetConnectionBehavior"
-        test_args = {"autoconnect": autoconnect}
-
-        return self.send_command(test_cmd, test_args)
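
The HFP facade removed above, like the RFCOMM and SDP facades removed below, is a thin wrapper over BaseLib.send_command: each method builds a facade-qualified command name plus a JSON-serializable argument dict and returns the raw response. A minimal sketch of that wrapper shape, using a hypothetical facade name:

from antlion.controllers.fuchsia_lib.base_lib import BaseLib

class ExampleFacadeLib(BaseLib):
    # Hypothetical facade, shown only to illustrate the wrapper pattern.
    def __init__(self, addr: str) -> None:
        super().__init__(addr, "example")

    def setValue(self, value):
        # Facade-qualified command plus JSON-serializable args, as in the
        # deleted hfp/rfcomm/sdp wrappers; returns the SL4F response dict.
        return self.send_command("example_facade.SetValue", {"value": value})
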
diff --git a/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py b/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py
deleted file mode 100644
index 10f0736..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaRfcommLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "rfcomm")
-
-    def init(self):
-        """Initializes the RFCOMM service.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the RFCOMM service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectSession(self, peer_id):
-        """Closes the RFCOMM Session with the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.DisconnectSession"
-        test_args = {"peer_id": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectRfcommChannel(self, peer_id, server_channel_number):
-        """Makes an outgoing RFCOMM connection to the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.ConnectRfcommChannel"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectRfcommChannel(self, peer_id, server_channel_number):
-        """Closes the RFCOMM channel with the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.DisconnectRfcommChannel"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def sendRemoteLineStatus(self, peer_id, server_channel_number):
-        """Sends a Remote Line Status update to the remote peer for the provided channel number
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.SendRemoteLineStatus"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeRfcomm(self, peer_id, server_channel_number, data):
-        """Sends data to the remote peer over the RFCOMM channel
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommWrite"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number,
-            "data": data
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py
deleted file mode 100644
index 25dcc33..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaProfileServerLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "profile_server")
-
-    def addService(self, record):
-        """Publishes an SDP service record specified by input args
-
-        Args:
-            record: A database that represents an SDP record to
-                be published.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerAddService"
-        test_args = {
-            "record": record,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def addSearch(self, attribute_list, profile_id):
-        """Publishes services specified by input args
-
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerAddSearch"
-        test_args = {
-            "attribute_list": attribute_list,
-            "profile_id": profile_id
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self, service_id):
-        """Removes a service.
-
-        Args:
-            record: A database that represents an SDP record to
-                be published.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerRemoveService"
-        test_args = {
-            "service_id": service_id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def init(self):
-        """Initializes the ProfileServerFacade's proxy object.
-
-        No operations for SDP can be performed until this is initialized.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def cleanUp(self):
-        """Cleans up all objects related to SDP.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerCleanup"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectL2cap(self, identifier, psm, mode):
-        """ Sends an outgoing l2cap connection to a connected peer device.
-
-        Args:
-            psm: The psm value to connect over. Available PSMs:
-                SDP 0x0001  See Bluetooth Service Discovery Protocol (SDP)
-                RFCOMM  0x0003  See RFCOMM with TS 07.10
-                TCS-BIN 0x0005  See Bluetooth Telephony Control Specification /
-                    TCS Binary
-                TCS-BIN-CORDLESS    0x0007  See Bluetooth Telephony Control
-                    Specification / TCS Binary
-                BNEP    0x000F  See Bluetooth Network Encapsulation Protocol
-                HID_Control 0x0011  See Human Interface Device
-                HID_Interrupt   0x0013  See Human Interface Device
-                UPnP    0x0015  See [ESDP]
-                AVCTP   0x0017  See Audio/Video Control Transport Protocol
-                AVDTP   0x0019  See Audio/Video Distribution Transport Protocol
-                AVCTP_Browsing  0x001B  See Audio/Video Remote Control Profile
-                UDI_C-Plane 0x001D  See the Unrestricted Digital Information
-                    Profile [UDI]
-                ATT 0x001F  See Bluetooth Core Specification
-                3DSP    0x0021  See 3D Synchronization Profile.
-                LE_PSM_IPSP 0x0023  See Internet Protocol Support Profile
-                    (IPSP)
-                OTS 0x0025  See Object Transfer Service (OTS)
-                EATT    0x0027  See Bluetooth Core Specification
-            mode: String - The channel mode to connect to. Available values:
-                Basic mode: BASIC
-                Enhanced Retransmission mode: ERTM
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerConnectL2cap"
-        test_args = {"identifier": identifier, "psm": psm, "mode": mode}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/device_lib.py b/src/antlion/controllers/fuchsia_lib/device_lib.py
new file mode 100644
index 0000000..f7ad6b6
--- /dev/null
+++ b/src/antlion/controllers/fuchsia_lib/device_lib.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
+class DeviceLib(BaseLib):
+    def __init__(self, addr: str) -> None:
+        super().__init__(addr, "device")
+
+    def get_device_name(self) -> str:
+        """Get the device name."""
+
+        return self.send_command("device_facade.GetDeviceName", {})
+
+    def get_product_name(self) -> str:
+        """Get the product name."""
+
+        return self.send_command("device_facade.GetProduct", {})
+
+    def get_version(self):
+        """Get the device version."""
+
+        return self.send_command("device_facade.GetVersion", {})
diff --git a/src/antlion/controllers/fuchsia_lib/ffx.py b/src/antlion/controllers/fuchsia_lib/ffx.py
index ca05feb..3db6c8d 100644
--- a/src/antlion/controllers/fuchsia_lib/ffx.py
+++ b/src/antlion/controllers/fuchsia_lib/ffx.py
@@ -35,11 +35,10 @@
 class FFXError(signals.TestError):
     """Non-zero error code returned from a ffx command."""
 
-    def __init__(self, command: str,
-                 process: subprocess.CalledProcessError) -> None:
+    def __init__(self, command: str, process: subprocess.CalledProcessError) -> None:
         self.command = command
-        self.stdout: str = process.stdout.decode('utf-8', errors='replace')
-        self.stderr: str = process.stderr.decode('utf-8', errors='replace')
+        self.stdout: str = process.stdout.decode("utf-8", errors="replace")
+        self.stderr: str = process.stderr.decode("utf-8", errors="replace")
         self.exit_status = process.returncode
 
     def __str__(self) -> str:
@@ -61,11 +60,13 @@
         ssh_private_key_path: Path to Fuchsia DUT SSH private key.
     """
 
-    def __init__(self,
-                 binary_path: str,
-                 mdns_name: str,
-                 ip: str = None,
-                 ssh_private_key_path: str = None):
+    def __init__(
+        self,
+        binary_path: str,
+        mdns_name: str,
+        ip: str = None,
+        ssh_private_key_path: str = None,
+    ):
         """
         Args:
             binary_path: Path to ffx binary.
@@ -103,12 +104,13 @@
         self._has_been_reachable = False
         self._has_logged_version = False
 
-    def run(self,
-            command: str,
-            timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT,
-            skip_status_code_check: bool = False,
-            skip_reachability_check: bool = False
-            ) -> subprocess.CompletedProcess:
+    def run(
+        self,
+        command: str,
+        timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT,
+        skip_status_code_check: bool = False,
+        skip_reachability_check: bool = False,
+    ) -> subprocess.CompletedProcess:
         """Runs an ffx command.
 
         Verifies reachability before running, if it hasn't already.
@@ -135,13 +137,15 @@
             self.verify_reachable()
 
         self.log.debug(f'Running "{command}".')
-        full_command = f'{self.binary_path} -e {self._env_config_path} {command}'
+        full_command = f"{self.binary_path} -e {self._env_config_path} {command}"
 
         try:
-            result = subprocess.run(full_command.split(),
-                                    capture_output=True,
-                                    timeout=timeout_sec,
-                                    check=not skip_status_code_check)
+            result = subprocess.run(
+                full_command.split(),
+                capture_output=True,
+                timeout=timeout_sec,
+                check=not skip_status_code_check,
+            )
         except subprocess.CalledProcessError as e:
             raise FFXError(command, e) from e
         except subprocess.TimeoutExpired as e:
@@ -150,7 +154,7 @@
         return result
 
     def _create_isolated_environment(self) -> None:
-        """ Create a new isolated environment for ffx.
+        """Create a new isolated environment for ffx.
 
         This is needed to avoid overlapping ffx daemons while testing in
         parallel, causing the ffx invocations to “upgrade” one daemon to
@@ -163,8 +167,9 @@
         root_dir = context.get_current_context().get_full_output_path()
         epoch = utils.get_current_epoch_time()
         time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch))
-        target_dir = os.path.join(root_dir, f'{self.mdns_name}_{time_stamp}')
+            logger.epoch_to_log_line_timestamp(epoch)
+        )
+        target_dir = os.path.join(root_dir, f"{self.mdns_name}_{time_stamp}")
         os.makedirs(target_dir, exist_ok=True)
 
         # Sockets need to be created in a different directory to be guaranteed
@@ -173,10 +178,8 @@
         self._sock_dir = tempfile.mkdtemp()
         # On MacOS, the socket paths need to be just paths (not pre-created
         # Python tempfiles, which are not socket files).
-        self._ssh_auth_sock_path = str(
-            PurePath(self._sock_dir, 'ssh_auth_sock'))
-        self._overnet_socket_path = str(
-            PurePath(self._sock_dir, 'overnet_socket'))
+        self._ssh_auth_sock_path = str(PurePath(self._sock_dir, "ssh_auth_sock"))
+        self._overnet_socket_path = str(PurePath(self._sock_dir, "overnet_socket"))
 
         config: MutableMapping[str, Any] = {
             "target": {
@@ -205,6 +208,14 @@
                     "disabled": True,
                 },
             },
+            # Prevent log collection from all devices the ffx daemon sees; only
+            # collect logs from the target device.
+            #
+            # TODO(https://fxbug.dev/118764): Consider re-enabling after
+            # resolution of the issue causing a reboot of the target device.
+            "proactive_log": {
+                "enabled": False,
+            },
         }
 
         if self.ip:
@@ -220,7 +231,7 @@
             config["ssh"]["priv"] = self.ssh_private_key_path
 
         config_path = os.path.join(target_dir, "ffx_config.json")
-        with open(config_path, 'w', encoding="utf-8") as f:
+        with open(config_path, "w", encoding="utf-8") as f:
             json.dump(config, f, ensure_ascii=False, indent=4)
 
         env = {
@@ -229,15 +240,13 @@
             "global": None,
         }
         self._env_config_path = os.path.join(target_dir, "ffx_env.json")
-        with open(self._env_config_path, 'w', encoding="utf-8") as f:
+        with open(self._env_config_path, "w", encoding="utf-8") as f:
             json.dump(env, f, ensure_ascii=False, indent=4)
 
         # The ffx daemon will be started automatically when needed. There is no
         # need to start it manually here.
 
-    def verify_reachable(self,
-                         timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT
-                         ) -> None:
+    def verify_reachable(self, timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT) -> None:
         """Verify the target is reachable via RCS and various services.
 
         Blocks until the device allows for an RCS connection. If the device
@@ -271,7 +280,7 @@
                 self.run(cmd, timeout_sec=5, skip_reachability_check=True)
                 break
             except FFXError as e:
-                if 'took too long connecting to ascendd socket' in e.stderr:
+                if "took too long connecting to ascendd socket" in e.stderr:
                     err = e
                 else:
                     raise e
@@ -280,20 +289,21 @@
 
             if time.perf_counter() > timeout:
                 raise FFXTimeout(
-                    f'Waited over {timeout_sec}s for ffx to become reachable'
+                    f"Waited over {timeout_sec}s for ffx to become reachable"
                 ) from err
 
         # Use a shorter timeout than default because device information
         # gathering can hang for a long time if the device is not actually
         # connectable.
         try:
-            result = self.run("target show --json",
-                              timeout_sec=15,
-                              skip_reachability_check=True)
+            result = self.run(
+                "target show --json", timeout_sec=15, skip_reachability_check=True
+            )
         except Exception as e:
             self.log.error(
                 f'Failed to reach target device. Try running "{self.binary_path}'
-                + ' doctor" to diagnose issues.')
+                + ' doctor" to diagnose issues.'
+            )
             raise e
 
         self._has_been_reachable = True
@@ -302,8 +312,7 @@
             self._has_logged_version = True
             self.compare_version(result)
 
-    def compare_version(
-            self, target_show_result: subprocess.CompletedProcess) -> None:
+    def compare_version(self, target_show_result: subprocess.CompletedProcess) -> None:
         """Compares the version of Fuchsia with the version of ffx.
 
         Args:
@@ -311,17 +320,17 @@
                 output mode enabled
         """
         result_json = json.loads(target_show_result.stdout)
-        build_info = next(
-            filter(lambda s: s.get('label') == 'build', result_json))
+        build_info = next(filter(lambda s: s.get("label") == "build", result_json))
         version_info = next(
-            filter(lambda s: s.get('label') == 'version', build_info['child']))
-        device_version = version_info.get('value')
-        ffx_version = self.run("version").stdout.decode('utf-8')
+            filter(lambda s: s.get("label") == "version", build_info["child"])
+        )
+        device_version = version_info.get("value")
+        ffx_version = self.run("version").stdout.decode("utf-8")
 
-        self.log.info(
-            f"Device version: {device_version}, ffx version: {ffx_version}")
+        self.log.info(f"Device version: {device_version}, ffx version: {ffx_version}")
         if device_version != ffx_version:
             self.log.warning(
-                "ffx versions that differ from device versions may" +
-                " have compatibility issues. It is recommended to" +
-                " use versions within 6 weeks of each other.")
+                "ffx versions that differ from device versions may"
+                + " have compatibility issues. It is recommended to"
+                + " use versions within 6 weeks of each other."
+            )
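
For context on the reformatted FFX wrapper: it exposes an isolated-environment setup (_create_isolated_environment above), and run() verifies reachability before executing. A sketch of typical use, with placeholder paths and target identifiers:

from antlion.controllers.fuchsia_lib.ffx import FFX, FFXError

ffx = FFX(
    binary_path="/path/to/ffx",           # placeholder
    mdns_name="fuchsia-1234-5678-9abc",   # placeholder
    ip="192.168.42.11",                   # placeholder
    ssh_private_key_path="/path/to/key",  # placeholder
)
try:
    result = ffx.run("target show --json", timeout_sec=15)
    print(result.stdout.decode("utf-8", errors="replace"))
except FFXError as e:
    # stdout, stderr, and exit_status are populated from the failed subprocess.
    print(e.stderr)
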
diff --git a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
index d6e030e..30af9a8 100644
--- a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
@@ -23,18 +23,19 @@
 
 
 class FuchsiaHardwarePowerStatecontrolLib(base_lib.BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "hardware_power_statecontrol")
 
     def send_command(self, test_cmd, test_args, response_timeout=30):
         """Wrap send_command to allow disconnects after sending the request."""
         try:
-            response = super().send_command(test_cmd, test_args,
-                                            response_timeout)
-        except (TimeoutError, http.client.RemoteDisconnected,
-                base_lib.DeviceOffline) as e:
-            logging.warn(f'Error while sending power command: {e}')
+            response = super().send_command(test_cmd, test_args, response_timeout)
+        except (
+            TimeoutError,
+            http.client.RemoteDisconnected,
+            base_lib.DeviceOffline,
+        ) as e:
+            logging.warn(f"Error while sending power command: {e}")
             return
         return response
 
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
index 9910be0..0ff858c 100644
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
+++ b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
@@ -28,7 +28,8 @@
     def __init__(self, fuchsia_device):
         self.device = fuchsia_device
         self.log = logger.create_tagged_trace_logger(
-            'NetstackController for FuchsiaDevice | %s' % self.device.ip)
+            "NetstackController for FuchsiaDevice | %s" % self.device.ip
+        )
 
     def list_interfaces(self):
         """Retrieve netstack interfaces from netstack facade
@@ -38,8 +39,8 @@
             information
         """
         response = self.device.sl4f.netstack_lib.netstackListInterfaces()
-        if response.get('error'):
+        if response.get("error"):
             raise NetstackControllerError(
-                'Failed to get network interfaces list: %s' %
-                response['error'])
-        return response['result']
+                "Failed to get network interfaces list: %s" % response["error"]
+            )
+        return response["result"]
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
index 176d54e..922b167 100644
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
+++ b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
@@ -34,7 +34,8 @@
     def __init__(self, fuchsia_device):
         self.device = fuchsia_device
         self.log = logger.create_tagged_trace_logger(
-            'WlanController for FuchsiaDevice | %s' % self.device.ip)
+            "WlanController for FuchsiaDevice | %s" % self.device.ip
+        )
 
     # TODO(70501): Wrap wlan_lib functions and setup from FuchsiaDevice here
     # (similar to how WlanPolicyController does it) to prevent FuchsiaDevice
@@ -46,25 +47,26 @@
         pass
 
     def update_wlan_interfaces(self):
-        """ Retrieves WLAN interfaces from device and sets the FuchsiaDevice
+        """Retrieves WLAN interfaces from device and sets the FuchsiaDevice
         attributes.
         """
         wlan_interfaces = self.get_interfaces_by_role()
-        self.device.wlan_client_interfaces = wlan_interfaces['client']
-        self.device.wlan_ap_interfaces = wlan_interfaces['ap']
+        self.device.wlan_client_interfaces = wlan_interfaces["client"]
+        self.device.wlan_ap_interfaces = wlan_interfaces["ap"]
 
         # Set test interfaces to value from config, else the first found
         # interface, else None
         self.device.wlan_client_test_interface_name = self.device.conf_data.get(
-            'wlan_client_test_interface',
-            next(iter(self.device.wlan_client_interfaces), None))
+            "wlan_client_test_interface",
+            next(iter(self.device.wlan_client_interfaces), None),
+        )
 
         self.device.wlan_ap_test_interface_name = self.device.conf_data.get(
-            'wlan_ap_test_interface',
-            next(iter(self.device.wlan_ap_interfaces), None))
+            "wlan_ap_test_interface", next(iter(self.device.wlan_ap_interfaces), None)
+        )
 
     def get_interfaces_by_role(self):
-        """ Retrieves WLAN interface information, supplimented by netstack info.
+        """Retrieves WLAN interface information, supplimented by netstack info.
 
         Returns:
             Dict with keys 'client' and 'ap', each of which contain WLAN
@@ -73,32 +75,32 @@
 
         # Retrieve WLAN interface IDs
         response = self.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if response.get('error'):
-            raise WlanControllerError('Failed to get WLAN iface ids: %s' %
-                                      response['error'])
+        if response.get("error"):
+            raise WlanControllerError(
+                "Failed to get WLAN iface ids: %s" % response["error"]
+            )
 
-        wlan_iface_ids = response.get('result', [])
+        wlan_iface_ids = response.get("result", [])
         if len(wlan_iface_ids) < 1:
-            return {'client': {}, 'ap': {}}
+            return {"client": {}, "ap": {}}
 
         # Use IDs to get WLAN interface info and mac addresses
         wlan_ifaces_by_mac = {}
         for id in wlan_iface_ids:
             response = self.device.sl4f.wlan_lib.wlanQueryInterface(id)
-            if response.get('error'):
+            if response.get("error"):
                 raise WlanControllerError(
-                    'Failed to query wlan iface id %s: %s' %
-                    (id, response['error']))
+                    "Failed to query wlan iface id %s: %s" % (id, response["error"])
+                )
 
-            mac = response['result'].get('sta_addr', None)
+            mac = response["result"].get("sta_addr", None)
             if mac is None:
                 # Fallback to older field name to maintain backwards
                 # compatibility with older versions of SL4F's
                 # QueryIfaceResponse. See https://fxrev.dev/562146.
-                mac = response['result'].get('mac_addr')
+                mac = response["result"].get("mac_addr")
 
-            wlan_ifaces_by_mac[utils.mac_address_list_to_str(
-                mac)] = response['result']
+            wlan_ifaces_by_mac[utils.mac_address_list_to_str(mac)] = response["result"]
 
         # Use mac addresses to query the interfaces from the netstack view,
         # which allows us to supplement the interface information with the name,
@@ -109,21 +111,21 @@
         # to reconcile some of the information between the two perspectives, at
         # which point we can eliminate this step.
         net_ifaces = self.device.netstack_controller.list_interfaces()
-        wlan_ifaces_by_role = {'client': {}, 'ap': {}}
+        wlan_ifaces_by_role = {"client": {}, "ap": {}}
         for iface in net_ifaces:
             try:
                 # Some interfaces might not have a MAC
-                iface_mac = utils.mac_address_list_to_str(iface['mac'])
+                iface_mac = utils.mac_address_list_to_str(iface["mac"])
             except Exception as e:
-                self.log.debug(f'Error {e} getting MAC for iface {iface}')
+                self.log.debug(f"Error {e} getting MAC for iface {iface}")
                 continue
             if iface_mac in wlan_ifaces_by_mac:
-                wlan_ifaces_by_mac[iface_mac]['netstack_id'] = iface['id']
+                wlan_ifaces_by_mac[iface_mac]["netstack_id"] = iface["id"]
 
                 # Add to return dict, mapped by role then name.
-                wlan_ifaces_by_role[
-                    wlan_ifaces_by_mac[iface_mac]['role'].lower()][
-                        iface['name']] = wlan_ifaces_by_mac[iface_mac]
+                wlan_ifaces_by_role[wlan_ifaces_by_mac[iface_mac]["role"].lower()][
+                    iface["name"]
+                ] = wlan_ifaces_by_mac[iface_mac]
 
         return wlan_ifaces_by_role
 
@@ -138,45 +140,50 @@
             EnvironmentError - failure to get/set regulatory region
             ConnectionError - failure to query PHYs
         """
-        self.log.info('Setting DUT country code to %s' % country_code)
+        self.log.info("Setting DUT country code to %s" % country_code)
         country_code_response = self.device.sl4f.regulatory_region_lib.setRegion(
-            country_code)
-        if country_code_response.get('error'):
+            country_code
+        )
+        if country_code_response.get("error"):
             raise EnvironmentError(
-                'Failed to set country code (%s) on DUT. Error: %s' %
-                (country_code, country_code_response['error']))
+                "Failed to set country code (%s) on DUT. Error: %s"
+                % (country_code, country_code_response["error"])
+            )
 
-        self.log.info('Verifying DUT country code was correctly set to %s.' %
-                      country_code)
+        self.log.info(
+            "Verifying DUT country code was correctly set to %s." % country_code
+        )
         phy_ids_response = self.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get('error'):
-            raise ConnectionError('Failed to get phy ids from DUT. Error: %s' %
-                                  (country_code, phy_ids_response['error']))
+        if phy_ids_response.get("error"):
+            raise ConnectionError(
+                "Failed to get phy ids from DUT. Error: %s"
+                % phy_ids_response["error"]
+            )
 
         end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
         while time.time() < end_time:
-            for id in phy_ids_response['result']:
-                get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(
-                    id)
-                if get_country_response.get('error'):
+            for id in phy_ids_response["result"]:
+                get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(id)
+                if get_country_response.get("error"):
                     raise ConnectionError(
-                        'Failed to query PHY ID (%s) for country. Error: %s' %
-                        (id, get_country_response['error']))
+                        "Failed to query PHY ID (%s) for country. Error: %s"
+                        % (id, get_country_response["error"])
+                    )
 
-                set_code = ''.join([
-                    chr(ascii_char)
-                    for ascii_char in get_country_response['result']
-                ])
+                set_code = "".join(
+                    [chr(ascii_char) for ascii_char in get_country_response["result"]]
+                )
                 if set_code != country_code:
                     self.log.debug(
-                        'PHY (id: %s) has incorrect country code set. '
-                        'Expected: %s, Got: %s' % (id, country_code, set_code))
+                        "PHY (id: %s) has incorrect country code set. "
+                        "Expected: %s, Got: %s" % (id, country_code, set_code)
+                    )
                     break
             else:
-                self.log.info('All PHYs have expected country code (%s)' %
-                              country_code)
+                self.log.info("All PHYs have expected country code (%s)" % country_code)
                 break
             time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
         else:
-            raise EnvironmentError('Failed to set DUT country code to %s.' %
-                                   country_code)
+            raise EnvironmentError(
+                "Failed to set DUT country code to %s." % country_code
+            )
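
get_interfaces_by_role() returns a dict keyed by 'client' and 'ap', each mapping interface names to the merged WLAN/netstack info described above. A sketch, assuming the enclosing class is the WlanController defined in this file and `fuchsia_device` is already set up:

# Assumed import path and class name for this module.
from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
    WlanController,
)

controller = WlanController(fuchsia_device)  # fuchsia_device assumed initialized
controller.update_wlan_interfaces()  # also sets device.wlan_*_interfaces attributes

ifaces = controller.get_interfaces_by_role()
for name, info in ifaces["client"].items():
    # 'role' and 'netstack_id' come from merging wlanQueryInterface results
    # with the netstack interface list.
    print(name, info["role"], info["netstack_id"])
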
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
index 25f06b4..5ef126b 100644
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
+++ b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
@@ -14,26 +14,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import subprocess
 import time
 
 from antlion import logger
 from antlion import signals
 
-from antlion.controllers.fuchsia_lib.ffx import FFX, FFXError, FFXTimeout
 from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import SSHProvider
 
 SAVED_NETWORKS = "saved_networks"
 CLIENT_STATE = "client_connections_state"
 CONNECTIONS_ENABLED = "ConnectionsEnabled"
 CONNECTIONS_DISABLED = "ConnectionsDisabled"
 
-STATE_CONNECTED = 'Connected'
-STATE_CONNECTING = 'Connecting'
-STATE_DISCONNECTED = 'Disconnected'
-STATE_CONNECTION_STOPPED = 'ConnectionStopped'
+STATE_CONNECTED = "Connected"
+STATE_CONNECTING = "Connecting"
+STATE_DISCONNECTED = "Disconnected"
+STATE_CONNECTION_STOPPED = "ConnectionStopped"
 
+SESSION_MANAGER_TIMEOUT_SEC = 10
 FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
+DEFAULT_GET_UPDATE_TIMEOUT = 60
 
 
 class WlanPolicyControllerError(signals.ControllerError):
@@ -43,104 +44,91 @@
 class WlanPolicyController:
     """Contains methods related to the wlan policy layer, to be used in the
     FuchsiaDevice object.
+
+    Attributes:
+        sl4f: sl4f module for communicating to the WLAN policy controller.
+        ssh: transport to fuchsia device to stop component processes.
     """
 
-    def __init__(self, sl4f: SL4F, ffx: FFX):
-        self.client_controller = False
+    def __init__(self, sl4f: SL4F, ssh: SSHProvider):
+        """
+        Args:
+            sl4f: sl4f module for communicating to the WLAN policy controller.
+            ssh: transport to fuchsia device to stop component processes.
+        """
         self.preserved_networks_and_client_state = None
         self.policy_configured = False
         self.sl4f = sl4f
-        self.ffx = ffx
+        self.ssh = ssh
         self.log = logger.create_tagged_trace_logger(
-            f'WlanPolicyController | {ffx.ip}')
+            f"WlanPolicyController | {self.ssh.config.host_name}"
+        )
 
-    # TODO(b/231252355): Lower default timeout to 15s once ffx becomes more
-    # performant and/or reliable.
     def configure_wlan(
-            self,
-            preserve_saved_networks: bool,
-            timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT) -> None:
+        self,
+        preserve_saved_networks: bool,
+        timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT,
+    ) -> None:
         """Sets up wlan policy layer.
 
         Args:
             preserve_saved_networks: whether to clear existing saved
                 networks and client state, to be restored at test close.
-            timeout: time to wait for device to configure WLAN.
+            timeout_sec: time to wait for device to configure WLAN.
         """
-        end_time_sec = time.time() + timeout_sec
 
-        # Kill basemgr (Component v1 version of session manager)
-        while time.time() < end_time_sec:
-            response = self.sl4f.basemgr_lib.killBasemgr()
-            if not response.get('error'):
-                self.log.debug('Basemgr kill call successfully issued.')
-                break
-            self.log.debug(response['error'])
-            time.sleep(1)
-        else:
-            raise WlanPolicyControllerError(
-                'Failed to issue successful basemgr kill call.')
-
-        # Stop the session manager, which also holds the Policy controller.
-        try:
-            result = self.ffx.run(
-                'component destroy /core/session-manager/session:session',
-                skip_status_code_check=True)
-
-            if result.returncode == 0:
-                self.log.debug(f"Stopped session: {result.stdout}.")
-            else:
-                if (b'InstanceNotFound' in result.stderr
-                        or b'instance was not found' in result.stderr
-                        or b'does not exist' in result.stderr):
-                    self.log.debug(f'Instance was not found: {result.stderr}.')
-                else:
-                    raise WlanPolicyControllerError(
-                        f'Failed to stop the session: {result.stderr}.')
-        except FFXTimeout or FFXError as e:
-            raise WlanPolicyControllerError from e
+        # We need to stop session manager to free control of
+        # fuchsia.wlan.policy.ClientController, which can only be used by a
+        # single caller at a time. SL4F needs the ClientController to trigger
+        # WLAN policy state changes. On eng builds, session_manager can be
+        # restarted after being stopped during reboot, so we repeatedly kill
+        # the session_manager process for 10 seconds.
+        # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl
+        if "cast_agent.cm" in self.ssh.run("ps").stdout:
+            end_time_session_manager_sec = time.time() + SESSION_MANAGER_TIMEOUT_SEC
+            while time.time() < end_time_session_manager_sec:
+                self.ssh.stop_component("session_manager", is_cfv2_component=True)
 
         # Acquire control of policy layer
+        end_time_config_sec = time.time() + timeout_sec
         controller_errors = []
-        while time.time() < end_time_sec:
+        while time.time() < end_time_config_sec:
             # Create a client controller
             response = self.sl4f.wlan_policy_lib.wlanCreateClientController()
-            if response.get('error'):
-                controller_errors.append(response['error'])
-                self.log.debug(response['error'])
-                time.sleep(1)
-                continue
-            # Attempt to use the client controller (failure indicates a closed
-            # channel, meaning the client controller was rejected.
-            response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
-            if response.get('error'):
-                controller_errors.append(response['error'])
-                self.log.debug(response['error'])
+            if response.get("error"):
+                controller_errors.append(response["error"])
+                self.log.debug(response["error"])
                 time.sleep(1)
                 continue
             break
         else:
             self.log.warning(
                 "Failed to create and use a WLAN policy client controller. Errors: ["
-                + "; ".join(controller_errors) + "]")
+                + "; ".join(controller_errors)
+                + "]"
+            )
             raise WlanPolicyControllerError(
-                'Failed to create and use a WLAN policy client controller.')
+                "Failed to create and use a WLAN policy client controller."
+            )
 
-        self.log.info('ACTS tests now have control of the WLAN policy layer.')
+        self.log.info("ACTS tests now have control of the WLAN policy layer.")
 
         if preserve_saved_networks and not self.preserved_networks_and_client_state:
-            self.preserved_networks_and_client_state = self.remove_and_preserve_networks_and_client_state(
+            self.preserved_networks_and_client_state = (
+                self.remove_and_preserve_networks_and_client_state()
             )
         if not self.start_client_connections():
             raise WlanPolicyControllerError(
-                'Failed to start client connections during configuration.')
+                "Failed to start client connections during configuration."
+            )
 
         self.policy_configured = True
 
     def _deconfigure_wlan(self):
         if not self.stop_client_connections():
             raise WlanPolicyControllerError(
-                'Failed to stop client connections during deconfiguration.')
+                "Failed to stop client connections during deconfiguration."
+            )
         self.policy_configured = False
 
     def clean_up(self) -> None:
@@ -159,9 +147,10 @@
         Returns:
             True, if successful. False otherwise."""
         start_response = self.sl4f.wlan_policy_lib.wlanStartClientConnections()
-        if start_response.get('error'):
-            self.log.error('Failed to start client connections. Err: %s' %
-                           start_response['error'])
+        if start_response.get("error"):
+            self.log.error(
+                "Failed to start client connections. Err: %s" % start_response["error"]
+            )
             return False
         return True
 
@@ -172,14 +161,15 @@
         Returns:
             True, if successful. False otherwise."""
         stop_response = self.sl4f.wlan_policy_lib.wlanStopClientConnections()
-        if stop_response.get('error'):
-            self.log.error('Failed to stop client connections. Err: %s' %
-                           stop_response['error'])
+        if stop_response.get("error"):
+            self.log.error(
+                "Failed to stop client connections. Err: %s" % stop_response["error"]
+            )
             return False
         return True
 
     def save_and_connect(self, ssid, security, password=None, timeout=30):
-        """ Saves and connects to the network. This is the policy version of
+        """Saves and connects to the network. This is the policy version of
         connect and check_connect_response because the policy layer
         requires a saved network and the policy connect does not return
         success or failure
@@ -202,11 +192,7 @@
             return False
         return self.wait_for_connect(ssid, security, timeout=timeout)
 
-    def save_and_wait_for_autoconnect(self,
-                                      ssid,
-                                      security,
-                                      password=None,
-                                      timeout=30):
+    def save_and_wait_for_autoconnect(self, ssid, security, password=None, timeout=30):
         """Saves a network and waits, expecting an autoconnection to the newly
         saved network. This differs from save_and_connect, as it doesn't
         expressly trigger a connection first. There are cases in which an
@@ -227,13 +213,9 @@
             return False
         return self.wait_for_connect(ssid, security, timeout=timeout)
 
-    def remove_and_wait_for_disconnect(self,
-                                       ssid,
-                                       security_type,
-                                       password=None,
-                                       state=None,
-                                       status=None,
-                                       timeout=30):
+    def remove_and_wait_for_disconnect(
+        self, ssid, security_type, password=None, state=None, status=None, timeout=30
+    ):
         """Removes a single network and waits for a disconnect. It is not
         guaranteed the device will stay disconnected, as it may autoconnect
         to a different saved network.
@@ -254,25 +236,30 @@
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
         if not self.remove_network(ssid, security_type, password=password):
             return False
-        return self.wait_for_disconnect(ssid,
-                                        security_type,
-                                        state=state,
-                                        status=status,
-                                        timeout=timeout)
+        return self.wait_for_disconnect(
+            ssid, security_type, state=state, status=status, timeout=timeout
+        )
 
-    def remove_all_networks_and_wait_for_no_connections(self, timeout=30):
+    def remove_all_networks_and_wait_for_no_connections(
+        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
+    ) -> bool:
         """Removes all networks and waits until device is not connected to any
         networks. This should be used as the policy version of disconnect.
 
+        Args:
+            timeout_sec: The time to wait to see no connections.
+
         Returns:
             True, if successful. False otherwise.
         """
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
         if not self.remove_all_networks():
-            self.log.error('Failed to remove all networks. Cannot continue to '
-                           'wait_for_no_connections.')
+            self.log.error(
+                "Failed to remove all networks. Cannot continue to "
+                "wait_for_no_connections."
+            )
             return False
-        return self.wait_for_no_connections(timeout=timeout)
+        return self.wait_for_no_connections(timeout_sec=timeout_sec)
 
     def save_network(self, ssid, security_type, password=None):
         """Save a network via the policy layer.
@@ -286,10 +273,13 @@
             True, if successful. False otherwise.
         """
         save_response = self.sl4f.wlan_policy_lib.wlanSaveNetwork(
-            ssid, security_type, target_pwd=password)
-        if save_response.get('error'):
-            self.log.error('Failed to save network %s with error: %s' %
-                           (ssid, save_response['error']))
+            ssid, security_type, target_pwd=password
+        )
+        if save_response.get("error"):
+            self.log.error(
+                "Failed to save network %s with error: %s"
+                % (ssid, save_response["error"])
+            )
             return False
         return True
 
@@ -305,10 +295,13 @@
             True, if successful. False otherwise.
         """
         remove_response = self.sl4f.wlan_policy_lib.wlanRemoveNetwork(
-            ssid, security_type, target_pwd=password)
-        if remove_response.get('error'):
-            self.log.error('Failed to remove network %s with error: %s' %
-                           (ssid, remove_response['error']))
+            ssid, security_type, target_pwd=password
+        )
+        if remove_response.get("error"):
+            self.log.error(
+                "Failed to remove network %s with error: %s"
+                % (ssid, remove_response["error"])
+            )
             return False
         return True
 
@@ -319,9 +312,11 @@
             True, if successful. False otherwise.
         """
         remove_all_response = self.sl4f.wlan_policy_lib.wlanRemoveAllNetworks()
-        if remove_all_response.get('error'):
-            self.log.error('Error occurred removing all networks: %s' %
-                           remove_all_response['error'])
+        if remove_all_response.get("error"):
+            self.log.error(
+                "Error occurred removing all networks: %s"
+                % remove_all_response["error"]
+            )
             return False
         return True
 
@@ -334,13 +329,13 @@
         Raises:
             WlanPolicyControllerError, if retrieval fails.
         """
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks(
-        )
-        if saved_networks_response.get('error'):
+        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
+        if saved_networks_response.get("error"):
             raise WlanPolicyControllerError(
-                'Failed to retrieve saved networks: %s' %
-                saved_networks_response['error'])
-        return saved_networks_response['result']
+                "Failed to retrieve saved networks: %s"
+                % saved_networks_response["error"]
+            )
+        return saved_networks_response["result"]
 
     def send_connect_command(self, ssid, security_type):
         """Sends a connect command to a network that is already saved. This does
@@ -355,17 +350,17 @@
         Returns:
             True, if command send successfully. False otherwise.
         """
-        connect_response = self.sl4f.wlan_policy_lib.wlanConnect(
-            ssid, security_type)
-        if connect_response.get('error'):
+        connect_response = self.sl4f.wlan_policy_lib.wlanConnect(ssid, security_type)
+        if connect_response.get("error"):
             self.log.error(
-                'Error occurred when sending policy connect command: %s' %
-                connect_response['error'])
+                "Error occurred when sending policy connect command: %s"
+                % connect_response["error"]
+            )
             return False
         return True
 
     def wait_for_connect(self, ssid, security_type, timeout=30):
-        """ Wait until the device has connected to the specified network.
+        """Wait until the device has connected to the specified network.
         Args:
             ssid: string, the network name
             security: string, security type of network (see sl4f.wlan_policy_lib)
@@ -380,45 +375,47 @@
             time_left = max(1, int(end_time - time.time()))
 
             try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
+                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
             except TimeoutError:
-                self.log.error('Timed out waiting for response from device '
-                               'while waiting for network with SSID "%s" to '
-                               'connect. Device took too long to connect or '
-                               'the request timed out for another reason.' %
-                               ssid)
+                self.log.error(
+                    "Timed out waiting for response from device "
+                    'while waiting for network with SSID "%s" to '
+                    "connect. Device took too long to connect or "
+                    "the request timed out for another reason." % ssid
+                )
                 self.sl4f.wlan_policy_lib.wlanSetNewListener()
                 return False
-            if update.get('error'):
+            if update.get("error"):
                 # This can occur for many reasons, so it is not necessarily a
                 # failure.
-                self.log.debug('Error occurred getting status update: %s' %
-                               update['error'])
+                self.log.debug(
+                    "Error occurred getting status update: %s" % update["error"]
+                )
                 continue
 
-            for network in update['result']['networks']:
-                if network['id']['ssid'] == ssid or network['id'][
-                        'type_'].lower() == security_type.lower():
-                    if 'state' not in network:
+            for network in update["result"]["networks"]:
+                if (
+                    network["id"]["ssid"] == ssid
+                    or network["id"]["type_"].lower() == security_type.lower()
+                ):
+                    if "state" not in network:
                         raise WlanPolicyControllerError(
-                            'WLAN status missing state field.')
-                    elif network['state'].lower() == STATE_CONNECTED.lower():
+                            "WLAN status missing state field."
+                        )
+                    elif network["state"].lower() == STATE_CONNECTED.lower():
                         return True
             # Wait a bit before requesting another status update
             time.sleep(1)
         # Stopped getting updates because of timeout
-        self.log.error('Timed out waiting for network with SSID "%s" to '
-                       "connect" % ssid)
+        self.log.error(
+            'Timed out waiting for network with SSID "%s" to connect' % ssid
+        )
         return False
 
-    def wait_for_disconnect(self,
-                            ssid,
-                            security_type,
-                            state=None,
-                            status=None,
-                            timeout=30):
-        """ Wait for a disconnect of the specified network on the given device. This
+    def wait_for_disconnect(
+        self, ssid, security_type, state=None, status=None, timeout=30
+    ):
+        """Wait for a disconnect of the specified network on the given device. This
         will check that the correct connection state and disconnect status are
         given in update. If we do not see a disconnect after some time,
         return false.
@@ -443,69 +440,78 @@
         while time.time() < end_time:
             time_left = max(1, int(end_time - time.time()))
             try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
+                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
             except TimeoutError:
                 self.log.error(
-                    'Timed out waiting for response from device '
+                    "Timed out waiting for response from device "
                     'while waiting for network with SSID "%s" to '
-                    'disconnect. Device took too long to disconnect '
-                    'or the request timed out for another reason.' % ssid)
+                    "disconnect. Device took too long to disconnect "
+                    "or the request timed out for another reason." % ssid
+                )
                 self.sl4f.wlan_policy_lib.wlanSetNewListener()
                 return False
 
-            if update.get('error'):
+            if update.get("error"):
                 # This can occur for many reasons, so it is not necessarily a
                 # failure.
-                self.log.debug('Error occurred getting status update: %s' %
-                               update['error'])
+                self.log.debug(
+                    "Error occurred getting status update: %s" % update["error"]
+                )
                 continue
             # Update should include network, either connected to or recently disconnected.
-            if len(update['result']['networks']) == 0:
-                raise WlanPolicyControllerError(
-                    'WLAN state update is missing network.')
+            if len(update["result"]["networks"]) == 0:
+                raise WlanPolicyControllerError("WLAN state update is missing network.")
 
-            for network in update['result']['networks']:
-                if network['id']['ssid'] == ssid or network['id'][
-                        'type_'].lower() == security_type.lower():
-                    if 'state' not in network or 'status' not in network:
+            for network in update["result"]["networks"]:
+                if (
+                    network["id"]["ssid"] == ssid
+                    or network["id"]["type_"].lower() == security_type.lower()
+                ):
+                    if "state" not in network or "status" not in network:
                         raise WlanPolicyControllerError(
-                            'Client state summary\'s network is missing fields'
+                            "Client state summary's network is missing fields"
                         )
                     # If still connected, we will wait for another update and check again
-                    elif network['state'].lower() == STATE_CONNECTED.lower():
+                    elif network["state"].lower() == STATE_CONNECTED.lower():
                         continue
-                    elif network['state'].lower() == STATE_CONNECTING.lower():
+                    elif network["state"].lower() == STATE_CONNECTING.lower():
                         self.log.error(
                             'Update is "Connecting", but device should already be '
-                            'connected; expected disconnect')
+                            "connected; expected disconnect"
+                        )
                         return False
                     # Check that the network state and disconnect status are expected, ie
                     # that it isn't ConnectionFailed when we expect ConnectionStopped
-                    elif network['state'].lower() != state.lower(
-                    ) or network['status'].lower() != status.lower():
+                    elif (
+                        network["state"].lower() != state.lower()
+                        or network["status"].lower() != status.lower()
+                    ):
                         self.log.error(
-                            'Connection failed: a network failure occurred that is unrelated'
-                            'to remove network or incorrect status update. \nExpected state: '
-                            '%s, Status: %s,\nActual update: %s' %
-                            (state, status, network))
+                            "Connection failed: a network failure occurred that is unrelated"
+                            "to remove network or incorrect status update. \nExpected state: "
+                            "%s, Status: %s,\nActual update: %s"
+                            % (state, status, network)
+                        )
                         return False
                     else:
                         return True
             # Wait a bit before requesting another status update
             time.sleep(1)
         # Stopped getting updates because of timeout
-        self.log.error('Timed out waiting for network with SSID "%s" to '
-                       'connect' % ssid)
+        self.log.error(
+            'Timed out waiting for network with SSID "%s" to disconnect' % ssid
+        )
         return False
 
-    def wait_for_no_connections(self, timeout=30):
-        """ Waits to see that there are no existing connections the device. This
+    def wait_for_no_connections(
+        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
+    ) -> bool:
+        """Waits to see that there are no existing connections the device. This
         is the simplest way to watch for disconnections when only a single
         network is saved/present.
 
         Args:
-            timeout: int, time in seconds to wait to see no connections
+            timeout_sec: The time to wait to see no connections.
 
         Returns:
             True, if successful. False, if still connected after timeout.
@@ -514,16 +520,16 @@
         # then an update won't be generated by the device, and we'll time out.
         # Force an update by getting a new listener.
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        end_time = time.time() + timeout
+        end_time = time.time() + timeout_sec
         while time.time() < end_time:
             time_left = max(1, int(end_time - time.time()))
             try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
+                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
             except TimeoutError:
                 self.log.info(
                     "Timed out getting status update while waiting for all"
-                    " connections to end.")
+                    " connections to end."
+                )
                 self.sl4f.wlan_policy_lib.wlanSetNewListener()
                 return False
 
@@ -532,17 +538,18 @@
                 return False
             # If any network is connected or being connected to, wait for them
             # to disconnect.
-            if any(network['state'].lower() in
-                   {STATE_CONNECTED.lower(),
-                    STATE_CONNECTING.lower()}
-                   for network in update['result']['networks']):
+            if any(
+                network["state"].lower()
+                in {STATE_CONNECTED.lower(), STATE_CONNECTING.lower()}
+                for network in update["result"]["networks"]
+            ):
                 continue
             else:
                 return True
         return False
 
     def remove_and_preserve_networks_and_client_state(self):
-        """ Preserves networks already saved on devices before removing them to
+        """Preserves networks already saved on devices before removing them to
         setup up for a clean test environment. Records the state of client
         connections before tests.
 
@@ -551,47 +558,49 @@
         """
         # Save preexisting saved networks
         preserved_networks_and_state = {}
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks(
-        )
-        if saved_networks_response.get('error'):
+        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
+        if saved_networks_response.get("error"):
             raise WlanPolicyControllerError(
-                'Failed to get preexisting saved networks: %s' %
-                saved_networks_response['error'])
-        if saved_networks_response.get('result') != None:
-            preserved_networks_and_state[
-                SAVED_NETWORKS] = saved_networks_response['result']
+                "Failed to get preexisting saved networks: %s"
+                % saved_networks_response["error"]
+            )
+        if saved_networks_response.get("result") is not None:
+            preserved_networks_and_state[SAVED_NETWORKS] = saved_networks_response[
+                "result"
+            ]
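+            # Keep a copy so restore_preserved_networks_and_client_state() can
+            # re-save these networks once the test is done.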
 
         # Remove preexisting saved networks
         if not self.remove_all_networks():
             raise WlanPolicyControllerError(
-                'Failed to clear networks and disconnect at FuchsiaDevice creation.'
+                "Failed to clear networks and disconnect at FuchsiaDevice creation."
             )
 
         self.sl4f.wlan_policy_lib.wlanSetNewListener()
         update_response = self.sl4f.wlan_policy_lib.wlanGetUpdate()
-        update_result = update_response.get('result', {})
-        if update_result.get('state'):
-            preserved_networks_and_state[CLIENT_STATE] = update_result['state']
+        update_result = update_response.get("result", {})
+        if update_result.get("state"):
+            preserved_networks_and_state[CLIENT_STATE] = update_result["state"]
         else:
-            self.log.warn('Failed to get update; test will not start or '
-                          'stop client connections at the end of the test.')
+            self.log.warn(
+                "Failed to get update; test will not start or "
+                "stop client connections at the end of the test."
+            )
 
-        self.log.info('Saved networks cleared and preserved.')
+        self.log.info("Saved networks cleared and preserved.")
         return preserved_networks_and_state
 
     def restore_preserved_networks_and_client_state(self):
-        """ Restore saved networks and client state onto device if they have
+        """Restore saved networks and client state onto device if they have
         been preserved.
         """
         if not self.remove_all_networks():
-            self.log.warn('Failed to remove saved networks before restore.')
+            self.log.warn("Failed to remove saved networks before restore.")
         restore_success = True
-        for network in self.preserved_networks_and_client_state[
-                SAVED_NETWORKS]:
-            if not self.save_network(network["ssid"], network["security_type"],
-                                     network["credential_value"]):
-                self.log.warn('Failed to restore network (%s).' %
-                              network['ssid'])
+        for network in self.preserved_networks_and_client_state[SAVED_NETWORKS]:
+            if not self.save_network(
+                network["ssid"], network["security_type"], network["credential_value"]
+            ):
+                self.log.warn("Failed to restore network (%s)." % network["ssid"])
                 restore_success = False
         starting_state = self.preserved_networks_and_client_state[CLIENT_STATE]
         if starting_state == CONNECTIONS_ENABLED:
@@ -599,9 +608,9 @@
         else:
             state_restored = self.stop_client_connections()
         if not state_restored:
-            self.log.warn('Failed to restore client connections state.')
+            self.log.warn("Failed to restore client connections state.")
             restore_success = False
         if restore_success:
-            self.log.info('Preserved networks and client state restored.')
+            self.log.info("Preserved networks and client state restored.")
             self.preserved_networks_and_client_state = None
         return restore_success
diff --git a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
index 7b5c692..54d9e44 100644
--- a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
@@ -18,7 +18,6 @@
 
 
 class FuchsiaRegulatoryRegionLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "location_regulatory_region")
 
diff --git a/src/antlion/controllers/fuchsia_lib/logging_lib.py b/src/antlion/controllers/fuchsia_lib/logging_lib.py
index aba1acf..83825c4 100644
--- a/src/antlion/controllers/fuchsia_lib/logging_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/logging_lib.py
@@ -20,7 +20,6 @@
 
 
 class FuchsiaLoggingLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "logging")
 
@@ -35,7 +34,7 @@
         """
         test_cmd = "logging_facade.LogErr"
         test_args = {
-            "message": '[%s] %s' % (datetime.datetime.now(), message),
+            "message": "[%s] %s" % (datetime.datetime.now(), message),
         }
 
         return self.send_command(test_cmd, test_args)
@@ -50,7 +49,7 @@
             Dictionary, None if success, error if error.
         """
         test_cmd = "logging_facade.LogInfo"
-        test_args = {"message": '[%s] %s' % (datetime.datetime.now(), message)}
+        test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)}
 
         return self.send_command(test_cmd, test_args)
 
@@ -64,6 +63,6 @@
             Dictionary, None if success, error if error.
         """
         test_cmd = "logging_facade.LogWarn"
-        test_args = {"message": '[%s] %s' % (datetime.datetime.now(), message)}
+        test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)}
 
         return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
index 20893b2..481e9bd 100644
--- a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
@@ -18,7 +18,6 @@
 
 
 class FuchsiaNetstackLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "netstack")
 
diff --git a/src/antlion/controllers/fuchsia_lib/package_server.py b/src/antlion/controllers/fuchsia_lib/package_server.py
index b0a45c7..d497e96 100644
--- a/src/antlion/controllers/fuchsia_lib/package_server.py
+++ b/src/antlion/controllers/fuchsia_lib/package_server.py
@@ -31,8 +31,8 @@
 from antlion import signals
 from antlion import utils
 
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError, SSHProvider
-from antlion.controllers.fuchsia_lib.utils_lib import wait_for_port
+from antlion.controllers.fuchsia_lib.ssh import SSHError, SSHProvider
+from antlion.net import wait_for_port
 from antlion.tracelogger import TraceLogger
 
 DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com"
@@ -45,13 +45,14 @@
 
 def random_port() -> int:
     s = socket.socket()
-    s.bind(('', 0))
+    s.bind(("", 0))
     return s.getsockname()[1]
 
 
 @dataclass
 class Route:
     """Represent a route in the routing table."""
+
     preferred_source: Optional[str]
 
 
@@ -77,9 +78,9 @@
     Returns:
         Routes with destination to dest_ip.
     """
-    resp = subprocess.run(f"ip -json route get {dest_ip}".split(),
-                          capture_output=True,
-                          check=True)
+    resp = subprocess.run(
+        f"ip -json route get {dest_ip}".split(), capture_output=True, check=True
+    )
     routes = json.loads(resp.stdout)
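+    # Illustrative example: if `ip -json route get` reports
+    # [{"dst": "192.0.2.10", "prefsrc": "192.0.2.1"}], this returns
+    # [Route(preferred_source="192.0.2.1")]; entries without "prefsrc" map to None.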
     return [Route(r.get("prefsrc")) for r in routes]
 
@@ -105,7 +106,8 @@
     routes = find_routes_to(device_ip)
     if len(routes) != 1:
         raise PackageServerError(
-            f"Expected only one route to {device_ip}, got {routes}")
+            f"Expected only one route to {device_ip}, got {routes}"
+        )
 
     route = routes[0]
     if not route.preferred_source:
@@ -150,14 +152,13 @@
         Raises:
             TestAbortClass: when the timestamp.json file has expired
         """
-        with open(f'{self._packages_path}/repository/timestamp.json',
-                  'r') as f:
+        with open(f"{self._packages_path}/repository/timestamp.json", "r") as f:
             data = json.load(f)
             expiresAtRaw = data["signed"]["expires"]
-            expiresAt = datetime.strptime(expiresAtRaw, '%Y-%m-%dT%H:%M:%SZ')
+            expiresAt = datetime.strptime(expiresAtRaw, "%Y-%m-%dT%H:%M:%SZ")
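+            # "expires" looks like "2023-01-01T00:00:00Z" (illustrative value); it is
+            # parsed as a naive datetime and compared against the host clock below.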
             if expiresAt <= datetime.now():
                 raise signals.TestAbortClass(
-                    f'{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}'
+                    f"{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}"
                 )
 
     def start(self) -> None:
@@ -171,36 +172,39 @@
             )
             return
 
-        pm_command = f'{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}'
+        pm_command = f"{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}"
 
         root_dir = context.get_current_context().get_full_output_path()
         epoch = utils.get_current_epoch_time()
         time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch))
-        self._log_path = os.path.join(root_dir, f'pm_server.{time_stamp}.log')
+            logger.epoch_to_log_line_timestamp(epoch)
+        )
+        self._log_path = os.path.join(root_dir, f"pm_server.{time_stamp}.log")
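+        # pm's stdout and stderr are appended to this log and surfaced in the error
+        # if the server never starts listening.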
 
-        self._server_log = open(self._log_path, 'a+')
-        self._server_proc = subprocess.Popen(pm_command.split(),
-                                             preexec_fn=os.setpgrp,
-                                             stdout=self._server_log,
-                                             stderr=subprocess.STDOUT)
+        self._server_log = open(self._log_path, "a+")
+        self._server_proc = subprocess.Popen(
+            pm_command.split(),
+            preexec_fn=os.setpgrp,
+            stdout=self._server_log,
+            stderr=subprocess.STDOUT,
+        )
         try:
-            wait_for_port('127.0.0.1', self._port)
+            wait_for_port("127.0.0.1", self._port)
         except TimeoutError as e:
             if self._server_log:
                 self._server_log.close()
             if self._log_path:
-                with open(self._log_path, 'r') as f:
+                with open(self._log_path, "r") as f:
                     logs = f.read()
             raise TimeoutError(
                 f"pm serve failed to expose port {self._port}. Logs:\n{logs}"
             ) from e
 
-        self.log.info(f'Serving packages on port {self._port}')
+        self.log.info(f"Serving packages on port {self._port}")
 
-    def configure_device(self,
-                         ssh: SSHProvider,
-                         repo_name=DEFAULT_FUCHSIA_REPO_NAME) -> None:
+    def configure_device(
+        self, ssh: SSHProvider, repo_name=DEFAULT_FUCHSIA_REPO_NAME
+    ) -> None:
         """Configure the device to use this package server.
 
         Args:
@@ -209,16 +213,15 @@
         """
         # Remove any existing repositories that may be stale.
         try:
-            ssh.run(f'pkgctl repo rm fuchsia-pkg://{repo_name}')
-        except FuchsiaSSHError as e:
-            if 'NOT_FOUND' not in e.result.stderr:
+            ssh.run(f"pkgctl repo rm fuchsia-pkg://{repo_name}")
+        except SSHError as e:
+            if "NOT_FOUND" not in e.result.stderr:
                 raise e
 
         # Configure the device with the new repository.
         host_ip = find_host_ip(ssh.config.host_name)
         repo_url = f"http://{host_ip}:{self._port}"
-        ssh.run(
-            f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json")
+        ssh.run(f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json")
         self.log.info(
             f'Added repo "{repo_name}" as {repo_url} on device {ssh.config.host_name}'
         )
@@ -227,7 +230,8 @@
         """Stop the package server."""
         if not self._server_proc:
             self.log.warn(
-                "Skipping to stop the server since it hasn't been started yet")
+                "Skipping to stop the server since it hasn't been started yet"
+            )
             return
 
         self._server_proc.terminate()
diff --git a/src/antlion/controllers/fuchsia_lib/sl4f.py b/src/antlion/controllers/fuchsia_lib/sl4f.py
index 1958772..e051d7c 100644
--- a/src/antlion/controllers/fuchsia_lib/sl4f.py
+++ b/src/antlion/controllers/fuchsia_lib/sl4f.py
@@ -18,29 +18,26 @@
 import sys
 
 from antlion import logger
-from antlion.controllers.fuchsia_lib import utils_lib
-from antlion.controllers.fuchsia_lib.audio_lib import FuchsiaAudioLib
-from antlion.controllers.fuchsia_lib.basemgr_lib import FuchsiaBasemgrLib
-from antlion.controllers.fuchsia_lib.bt.avdtp_lib import FuchsiaAvdtpLib
-from antlion.controllers.fuchsia_lib.bt.ble_lib import FuchsiaBleLib
-from antlion.controllers.fuchsia_lib.bt.bts_lib import FuchsiaBtsLib
-from antlion.controllers.fuchsia_lib.bt.gattc_lib import FuchsiaGattcLib
-from antlion.controllers.fuchsia_lib.bt.gatts_lib import FuchsiaGattsLib
-from antlion.controllers.fuchsia_lib.bt.hfp_lib import FuchsiaHfpLib
-from antlion.controllers.fuchsia_lib.bt.rfcomm_lib import FuchsiaRfcommLib
-from antlion.controllers.fuchsia_lib.bt.sdp_lib import FuchsiaProfileServerLib
-from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import FuchsiaHardwarePowerStatecontrolLib
-from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import FuchsiaRegulatoryRegionLib
+from antlion.controllers.fuchsia_lib.device_lib import DeviceLib
+from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import (
+    FuchsiaHardwarePowerStatecontrolLib,
+)
+from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import (
+    FuchsiaRegulatoryRegionLib,
+)
 from antlion.controllers.fuchsia_lib.logging_lib import FuchsiaLoggingLib
 from antlion.controllers.fuchsia_lib.netstack.netstack_lib import FuchsiaNetstackLib
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider, FuchsiaSSHError
+from antlion.controllers.fuchsia_lib.ssh import SSHProvider, SSHError
 from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import FuchsiaWlanApPolicyLib
-from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import FuchsiaWlanDeprecatedConfigurationLib
+from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
+    FuchsiaWlanDeprecatedConfigurationLib,
+)
 from antlion.controllers.fuchsia_lib.wlan_lib import FuchsiaWlanLib
 from antlion.controllers.fuchsia_lib.wlan_policy_lib import FuchsiaWlanPolicyLib
+from antlion.net import wait_for_port
 
 DEFAULT_SL4F_PORT = 80
-START_SL4F_V2_CMD = 'start_sl4f'
+START_SL4F_V2_CMD = "start_sl4f"
 
 
 class SL4F:
@@ -52,78 +49,65 @@
         log: Logger for the device-specific instance of SL4F.
     """
 
-    def __init__(self, ssh: SSHProvider,
-                 port: int = DEFAULT_SL4F_PORT) -> None:
+    def __init__(self, ssh: SSHProvider, port: int = DEFAULT_SL4F_PORT) -> None:
         """
         Args:
             ssh: SSHProvider transport to start and stop SL4F.
             port: Port for the SL4F server to listen on.
         """
+        host = ssh.config.host_name
+
         if sys.version_info < (3, 9):
             # TODO(http://b/261746355): Remove this if statement once the
             # minimum Python version is 3.9 or newer.
-            host = ipaddress.ip_address(ssh.config.host_name.split('%')[0])
-            if host.version == 4:
-                self.address = f'http://{host}:{port}'
-            elif host.version == 6:
-                host = ssh.config.host_name
-                self.address = f'http://[{host}]:{port}'
+            ip = ipaddress.ip_address(host.split("%")[0])
+            if ip.version == 4:
+                self.address = f"http://{ip}:{port}"
+            elif ip.version == 6:
+                ip = ssh.config.host_name
+                self.address = f"http://[{ip}]:{port}"
         else:
-            host = ipaddress.ip_address(ssh.config.host_name)
-            if host.version == 4:
-                self.address = f'http://{host}:{port}'
-            elif host.version == 6:
-                self.address = f'http://[{host}]:{port}'
+            ip = ipaddress.ip_address(host)
+            if ip.version == 4:
+                self.address = f"http://{ip}:{port}"
+            elif ip.version == 6:
+                self.address = f"http://[{ip}]:{port}"
 
         self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address}")
 
         try:
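+            # Stop any previously running SL4F component before starting a fresh one.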
+            ssh.stop_component("sl4f")
             ssh.run(START_SL4F_V2_CMD).stdout
-        except FuchsiaSSHError:
+        except SSHError:
             # TODO(fxbug.dev/99331) Remove support to run SL4F in CFv1 mode
             # once ACTS no longer use images that comes with only CFv1 SL4F.
             self.log.warn(
                 "Running SL4F in CFv1 mode, "
                 "this is deprecated for images built after 5/9/2022, "
-                "see https://fxbug.dev/77056 for more info.")
-            ssh.stop_v1_component("sl4f")
+                "see https://fxbug.dev/77056 for more info."
+            )
+            ssh.stop_component("sl4f")
             ssh.start_v1_component("sl4f")
 
-        utils_lib.wait_for_port(str(host), port)
+        try:
+            wait_for_port(host, port)
+            self.log.info("SL4F server is reachable")
+        except TimeoutError as e:
+            raise TimeoutError("SL4F server is unreachable") from e
+
         self._init_libraries()
-        self._verify_sl4f_connection()
 
     def _init_libraries(self) -> None:
-        # Grab commands from FuchsiaAudioLib
-        self.audio_lib = FuchsiaAudioLib(self.address)
-
-        # Grab commands from FuchsiaAvdtpLib
-        self.avdtp_lib = FuchsiaAvdtpLib(self.address)
-
-        # Grab commands from FuchsiaHfpLib
-        self.hfp_lib = FuchsiaHfpLib(self.address)
-
-        # Grab commands from FuchsiaRfcommLib
-        self.rfcomm_lib = FuchsiaRfcommLib(self.address)
-
-        # Grab commands from FuchsiaBasemgrLib
-        self.basemgr_lib = FuchsiaBasemgrLib(self.address)
-
-        # Grab commands from FuchsiaBleLib
-        self.ble_lib = FuchsiaBleLib(self.address)
-
-        # Grab commands from FuchsiaBtsLib
-        self.bts_lib = FuchsiaBtsLib(self.address)
-
-        # Grab commands from FuchsiaGattcLib
-        self.gattc_lib = FuchsiaGattcLib(self.address)
-
-        # Grab commands from FuchsiaGattsLib
-        self.gatts_lib = FuchsiaGattsLib(self.address)
+        # Grab commands from DeviceLib
+        self.device_lib = DeviceLib(self.address)
 
         # Grab commands from FuchsiaHardwarePowerStatecontrolLib
-        self.hardware_power_statecontrol_lib = (
-            FuchsiaHardwarePowerStatecontrolLib(self.address))
+        self.hardware_power_statecontrol_lib = FuchsiaHardwarePowerStatecontrolLib(
+            self.address
+        )
+
+        # Grab commands from FuchsiaRegulatoryRegionLib
+        self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address)
 
         # Grab commands from FuchsiaLoggingLib
         self.logging_lib = FuchsiaLoggingLib(self.address)
@@ -131,31 +115,16 @@
         # Grab commands from FuchsiaNetstackLib
         self.netstack_lib = FuchsiaNetstackLib(self.address)
 
-        # Grab commands from FuchsiaProfileServerLib
-        self.sdp_lib = FuchsiaProfileServerLib(self.address)
-
-        # Grab commands from FuchsiaRegulatoryRegionLib
-        self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address)
+        # Grab commands from FuchsiaWlanApPolicyLib
+        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)
 
         # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
-        self.wlan_deprecated_configuration_lib = (
-            FuchsiaWlanDeprecatedConfigurationLib(self.address))
+        self.wlan_deprecated_configuration_lib = FuchsiaWlanDeprecatedConfigurationLib(
+            self.address
+        )
 
         # Grab commands from FuchsiaWlanLib
         self.wlan_lib = FuchsiaWlanLib(self.address)
 
-        # Grab commands from FuchsiaWlanApPolicyLib
-        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)
-
         # Grab commands from FuchsiaWlanPolicyLib
         self.wlan_policy_lib = FuchsiaWlanPolicyLib(self.address)
-
-    def _verify_sl4f_connection(self) -> None:
-        """Verify SL4F commands can run on server."""
-
-        self.log.info('Verifying SL4F commands can run.')
-        try:
-            self.wlan_lib.wlanGetIfaceIdList()
-        except Exception as err:
-            raise ConnectionError(
-                f'Failed to connect and run command via SL4F. Err: {err}')
diff --git a/src/antlion/controllers/fuchsia_lib/ssh.py b/src/antlion/controllers/fuchsia_lib/ssh.py
index ec8f762..1d1f421 100644
--- a/src/antlion/controllers/fuchsia_lib/ssh.py
+++ b/src/antlion/controllers/fuchsia_lib/ssh.py
@@ -14,225 +14,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import subprocess
 import time
 
-from dataclasses import dataclass
-from typing import List, Union
-
-from antlion import logger
-from antlion import signals
+from antlion.capabilities.ssh import SSHError, SSHProvider
 
 DEFAULT_SSH_USER: str = "fuchsia"
-DEFAULT_SSH_PORT: int = 22
 DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519"
-DEFAULT_SSH_TIMEOUT_SEC: int = 60
-DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 30
-DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
 # The default package repository for all components.
-FUCHSIA_PACKAGE_REPO_NAME = 'fuchsia.com'
+FUCHSIA_PACKAGE_REPO_NAME = "fuchsia.com"
 
 
-class SSHResult:
-    """Result of an SSH command."""
-
-    def __init__(
-        self, process: Union[subprocess.CompletedProcess,
-                             subprocess.CalledProcessError]
-    ) -> None:
-        self._raw_stdout = process.stdout
-        self._stdout = process.stdout.decode('utf-8', errors='replace')
-        self._stderr = process.stderr.decode('utf-8', errors='replace')
-        self._exit_status: int = process.returncode
-
-    def __str__(self):
-        if self.exit_status == 0:
-            return self.stdout
-        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
-    @property
-    def stdout(self) -> str:
-        return self._stdout
-
-    @property
-    def stderr(self) -> str:
-        return self._stderr
-
-    @property
-    def exit_status(self) -> int:
-        return self._exit_status
-
-    @property
-    def raw_stdout(self) -> bytes:
-        return self._raw_stdout
-
-
-class FuchsiaSSHError(signals.TestError):
-    """A SSH command returned with a non-zero status code."""
-
-    def __init__(self, command: str, result: SSHResult):
-        super().__init__(
-            f'SSH command "{command}" unexpectedly returned {result}')
-        self.result = result
-
-
-class SSHTimeout(signals.TestError):
-    """A SSH command timed out."""
-
-    def __init__(self, err: subprocess.TimeoutExpired):
-        super().__init__(
-            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
-            f'stdout="{err.stdout}", stderr="{err.stderr}"')
-
-
-class FuchsiaSSHTransportError(signals.TestError):
-    """Failure to send an SSH command."""
-
-
-@dataclass
-class SSHConfig:
-    """SSH client config."""
-
-    # SSH flags. See ssh(1) for full details.
-    host_name: str
-    identity_file: str
-
-    ssh_binary: str = 'ssh'
-    config_file: str = '/dev/null'
-    port: int = 22
-    user: str = DEFAULT_SSH_USER
-
-    # SSH options. See ssh_config(5) for full details.
-    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
-    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
-    strict_host_key_checking: bool = False
-    user_known_hosts_file: str = "/dev/null"
-    log_level: str = "ERROR"
-
-    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
-        """Generate the complete command to execute command over SSH.
-
-        Args:
-            command: The command to run over SSH
-            force_tty: Force pseudo-terminal allocation. This can be used to
-                execute arbitrary screen-based programs on a remote machine,
-                which can be very useful, e.g. when implementing menu services.
-
-        Returns:
-            Arguments composing the complete call to SSH.
-        """
-        optional_flags = []
-        if force_tty:
-            # Multiple -t options force tty allocation, even if ssh has no local
-            # tty. This is necessary for launching ssh with subprocess without
-            # shell=True.
-            optional_flags.append('-tt')
-
-        return [
-            self.ssh_binary,
-            # SSH flags
-            '-i',
-            self.identity_file,
-            '-F',
-            self.config_file,
-            '-p',
-            str(self.port),
-            # SSH configuration options
-            '-o',
-            f'ConnectTimeout={self.connect_timeout}',
-            '-o',
-            f'ServerAliveInterval={self.server_alive_interval}',
-            '-o',
-            f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
-            '-o',
-            f'UserKnownHostsFile={self.user_known_hosts_file}',
-            '-o',
-            f'LogLevel={self.log_level}',
-        ] + optional_flags + [
-            f'{self.user}@{self.host_name}'
-        ] + command.split()
-
-
-class SSHProvider:
+class FuchsiaSSHProvider(SSHProvider):
     """Device-specific provider for SSH clients."""
 
-    def __init__(self, config: SSHConfig) -> None:
-        """
-        Args:
-            config: SSH client config
-        """
-        logger_tag = f"ssh | {config.host_name}"
-        if config.port != DEFAULT_SSH_PORT:
-            logger_tag += f':{config.port}'
-
-        # Check if the private key exists
-
-        self.log = logger.create_tagged_trace_logger(logger_tag)
-        self.config = config
-
-    def run(self,
-            command: str,
-            timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
-            connect_retries: int = 3,
-            force_tty: bool = False) -> SSHResult:
-        """Run a command on the device then exit.
-
-        Args:
-            command: String to send to the device.
-            timeout_sec: Seconds to wait for the command to complete.
-            connect_retries: Amount of times to retry connect on fail.
-            force_tty: Force pseudo-terminal allocation.
-
-        Raises:
-            FuchsiaSSHError: if the SSH command returns a non-zero status code
-            FuchsiaSSHTimeout: if there is no response within timeout_sec
-            FuchsiaSSHTransportError: if SSH fails to run the command
-
-        Returns:
-            SSHResults from the executed command.
-        """
-        err: Exception
-        for i in range(0, connect_retries):
-            try:
-                return self._run(command, timeout_sec, force_tty)
-            except FuchsiaSSHTransportError as e:
-                err = e
-                self.log.warn(f'Connect failed: {e}')
-        raise err
-
-    def _run(self, command: str, timeout_sec: int, force_tty: bool) -> SSHResult:
-        full_command = self.config.full_command(command, force_tty)
-        self.log.debug(f'Running "{" ".join(full_command)}"')
-        try:
-            process = subprocess.run(full_command,
-                                     capture_output=True,
-                                     timeout=timeout_sec,
-                                     check=True)
-        except subprocess.CalledProcessError as e:
-            if e.returncode == 255:
-                stderr = e.stderr.decode('utf-8', errors='replace')
-                if 'Name or service not known' in stderr or 'Host does not exist' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Hostname {self.config.host_name} cannot be resolved to an address'
-                    ) from e
-                if 'Connection timed out' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Failed to establish a connection to {self.config.host_name} within {timeout_sec}s'
-                    ) from e
-                if 'Connection refused' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Connection refused by {self.config.host_name}') from e
-
-            raise FuchsiaSSHError(command, SSHResult(e)) from e
-        except subprocess.TimeoutExpired as e:
-            raise SSHTimeout(e) from e
-
-        return SSHResult(process)
-
-    def start_v1_component(self,
-                           component: str,
-                           timeout_sec: int = 5,
-                           repo: str = FUCHSIA_PACKAGE_REPO_NAME) -> None:
+    def start_v1_component(
+        self,
+        component: str,
+        timeout_sec: int = 5,
+        repo: str = FUCHSIA_PACKAGE_REPO_NAME,
+    ) -> None:
         """Start a CFv1 component in the background.
 
         Args:
@@ -246,26 +46,34 @@
         # The "run -d" command will hang when executed without a pseudo-tty
         # allocated.
         self.run(
-            f'run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx', force_tty=True)
+            f"run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx",
+            force_tty=True,
+        )
 
         timeout = time.perf_counter() + timeout_sec
         while True:
             ps_cmd = self.run("ps")
-            if f'{component}.cmx' in ps_cmd.stdout:
+            if f"{component}.cmx" in ps_cmd.stdout:
                 return
             if time.perf_counter() > timeout:
                 raise TimeoutError(
-                    f'Failed to start "{component}.cmx" after {timeout_sec}s')
+                    f'Failed to start "{component}.cmx" after {timeout_sec}s'
+                )
 
-    def stop_v1_component(self, component: str) -> None:
-        """Stop all instances of a CFv1 component.
+    def stop_component(self, component: str, is_cfv2_component: bool = False) -> None:
+        """Stop all instances of a CFv1 or CFv2 component.
 
         Args:
-            component: Name of the component without ".cmx"
+            component: Name of the component without suffix("cm" or "cmx").
+            is_cfv2_component: Determines the component suffix to use.
         """
+        suffix = "cm" if is_cfv2_component else "cmx"
+
         try:
-            self.run(f'killall {component}.cmx')
-        except FuchsiaSSHError as e:
-            if 'no tasks found' in e.result.stderr:
+            self.run(f"killall {component}.{suffix}")
+            self.log.info(f"Stopped component: {component}.{suffix}")
+        except SSHError as e:
+            if "no tasks found" in e.result.stderr:
+                self.log.debug(f"Could not find component: {component}.{suffix}")
                 return
             raise e
diff --git a/src/antlion/controllers/fuchsia_lib/utils_lib.py b/src/antlion/controllers/fuchsia_lib/utils_lib.py
index 897749f..1e1336a 100644
--- a/src/antlion/controllers/fuchsia_lib/utils_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/utils_lib.py
@@ -16,18 +16,17 @@
 
 import os
 import logging
-import socket
 import tarfile
 import tempfile
 import time
 
 from antlion import utils
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
 from antlion.libs.proc import job
 from antlion.utils import get_fuchsia_mdns_ipv6_address
 
 MDNS_LOOKUP_RETRY_MAX = 3
 FASTBOOT_TIMEOUT = 30
+FLASH_TIMEOUT_SEC = 60 * 5  # 5 minutes
 AFTER_FLASH_BOOT_TIME = 30
 WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC = 360
 PROCESS_CHECK_WAIT_TIME_SEC = 30
@@ -36,8 +35,7 @@
 FUCHSIA_RELEASE_TESTING_URL = "gs://fuchsia-release-testing/images"
 
 
-def flash(fuchsia_device, use_ssh=False,
-          fuchsia_reconnect_after_reboot_time=5):
+def flash(fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5):
     """A function to flash, not pave, a fuchsia_device
 
     Args:
@@ -47,24 +45,27 @@
         True if successful.
     """
     if not fuchsia_device.authorized_file:
-        raise ValueError('A ssh authorized_file must be present in the '
-                         'ACTS config to flash fuchsia_devices.')
+        raise ValueError(
+            "A ssh authorized_file must be present in the "
+            "ACTS config to flash fuchsia_devices."
+        )
     # This is the product type from the fx set command.
     # Do 'fx list-products' to see options in Fuchsia source tree.
     if not fuchsia_device.product_type:
-        raise ValueError('A product type must be specified to flash '
-                         'fuchsia_devices.')
+        raise ValueError(
+            "A product type must be specified to flash " "fuchsia_devices."
+        )
     # This is the board type from the fx set command.
     # Do 'fx list-boards' to see options in Fuchsia source tree.
     if not fuchsia_device.board_type:
-        raise ValueError('A board type must be specified to flash '
-                         'fuchsia_devices.')
+        raise ValueError("A board type must be specified to flash " "fuchsia_devices.")
     if not fuchsia_device.build_number:
-        fuchsia_device.build_number = 'LATEST'
+        fuchsia_device.build_number = "LATEST"
     if not fuchsia_device.mdns_name:
         raise ValueError(
-            'Either fuchsia_device mdns_name must be specified or '
-            'ip must be the mDNS name to be able to flash.')
+            "Either fuchsia_device mdns_name must be specified or "
+            "ip must be the mDNS name to be able to flash."
+        )
 
     file_to_download = None
     image_archive_path = None
@@ -73,34 +74,35 @@
     if not fuchsia_device.specific_image:
         product_build = fuchsia_device.product_type
         if fuchsia_device.build_type:
-            product_build = f'{product_build}_{fuchsia_device.build_type}'
-        if 'LATEST' in fuchsia_device.build_number:
-            sdk_version = 'sdk'
-            if 'LATEST_F' in fuchsia_device.build_number:
-                f_branch = fuchsia_device.build_number.split('LATEST_F', 1)[1]
-                sdk_version = f'f{f_branch}_sdk'
+            product_build = f"{product_build}_{fuchsia_device.build_type}"
+        if "LATEST" in fuchsia_device.build_number:
+            sdk_version = "sdk"
+            if "LATEST_F" in fuchsia_device.build_number:
+                f_branch = fuchsia_device.build_number.split("LATEST_F", 1)[1]
+                sdk_version = f"f{f_branch}_sdk"
             file_to_download = (
-                f'{FUCHSIA_RELEASE_TESTING_URL}/'
-                f'{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz'
+                f"{FUCHSIA_RELEASE_TESTING_URL}/"
+                f"{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz"
             )
         else:
             # Must be a fully qualified build number (e.g. 5.20210721.4.1215)
             file_to_download = (
-                f'{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/'
-                f'{product_build}.{fuchsia_device.board_type}-release.tgz')
-    elif 'gs://' in fuchsia_device.specific_image:
+                f"{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/"
+                f"{product_build}.{fuchsia_device.board_type}-release.tgz"
+            )
+    elif "gs://" in fuchsia_device.specific_image:
         file_to_download = fuchsia_device.specific_image
     elif os.path.isdir(fuchsia_device.specific_image):
         image_path = fuchsia_device.specific_image
     elif tarfile.is_tarfile(fuchsia_device.specific_image):
         image_archive_path = fuchsia_device.specific_image
     else:
-        raise ValueError(
-            f'Invalid specific_image "{fuchsia_device.specific_image}"')
+        raise ValueError(f'Invalid specific_image "{fuchsia_device.specific_image}"')
 
     if image_path:
-        reboot_to_bootloader(fuchsia_device, use_ssh,
-                             fuchsia_reconnect_after_reboot_time)
+        reboot_to_bootloader(
+            fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
+        )
         logging.info(
             f'Flashing {fuchsia_device.mdns_name} with {image_path} using authorized keys "{fuchsia_device.authorized_file}".'
         )
@@ -109,18 +111,19 @@
         suffix = fuchsia_device.board_type
         with tempfile.TemporaryDirectory(suffix=suffix) as image_path:
             if file_to_download:
-                logging.info(f'Downloading {file_to_download} to {image_path}')
-                job.run(f'gsutil cp {file_to_download} {image_path}')
+                logging.info(f"Downloading {file_to_download} to {image_path}")
+                job.run(f"gsutil cp {file_to_download} {image_path}")
                 image_archive_path = os.path.join(
-                    image_path, os.path.basename(file_to_download))
+                    image_path, os.path.basename(file_to_download)
+                )
 
             if image_archive_path:
                 # Use tar command instead of tarfile.extractall, as it takes too long.
-                job.run(f'tar xfvz {image_archive_path} -C {image_path}',
-                        timeout=120)
+                job.run(f"tar xfvz {image_archive_path} -C {image_path}", timeout=120)
 
-            reboot_to_bootloader(fuchsia_device, use_ssh,
-                                 fuchsia_reconnect_after_reboot_time)
+            reboot_to_bootloader(
+                fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
+            )
 
             logging.info(
                 f'Flashing {fuchsia_device.mdns_name} with {image_archive_path} using authorized keys "{fuchsia_device.authorized_file}".'
@@ -129,15 +132,15 @@
     return True
 
 
-def reboot_to_bootloader(fuchsia_device,
-                         use_ssh=False,
-                         fuchsia_reconnect_after_reboot_time=5):
+def reboot_to_bootloader(
+    fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5
+):
     import psutil
     import usbinfo
+    from antlion.controllers.fuchsia_lib.ssh import SSHError
 
     if use_ssh:
-        logging.info('Sending reboot command via SSH to '
-                     'get into bootloader.')
+        logging.info("Sending reboot command via SSH to " "get into bootloader.")
         # Sending this command will put the device in fastboot
         # but it does not guarantee the device will be in fastboot
         # after this command.  There is no check so if there is an
@@ -145,9 +148,10 @@
         # other check needs to be done.
         try:
             fuchsia_device.ssh.run(
-                'dm rb', timeout_sec=fuchsia_reconnect_after_reboot_time)
-        except FuchsiaSSHError as e:
-            if 'closed by remote host' not in e.result.stderr:
+                "dm rb", timeout_sec=fuchsia_reconnect_after_reboot_time
+            )
+        except SSHError as e:
+            if "closed by remote host" not in e.result.stderr:
                 raise e
     else:
         pass
@@ -155,25 +159,35 @@
 
     time_counter = 0
     while time_counter < FASTBOOT_TIMEOUT:
-        logging.info('Checking to see if fuchsia_device(%s) SN: %s is in '
-                     'fastboot. (Attempt #%s Timeout: %s)' %
-                     (fuchsia_device.mdns_name, fuchsia_device.serial_number,
-                      str(time_counter + 1), FASTBOOT_TIMEOUT))
+        logging.info(
+            "Checking to see if fuchsia_device(%s) SN: %s is in "
+            "fastboot. (Attempt #%s Timeout: %s)"
+            % (
+                fuchsia_device.mdns_name,
+                fuchsia_device.serial_number,
+                str(time_counter + 1),
+                FASTBOOT_TIMEOUT,
+            )
+        )
         for usb_device in usbinfo.usbinfo():
-            if (usb_device['iSerialNumber'] == fuchsia_device.serial_number
-                    and usb_device['iProduct'] == 'USB_download_gadget'):
+            if (
+                usb_device["iSerialNumber"] == fuchsia_device.serial_number
+                and usb_device["iProduct"] == "USB_download_gadget"
+            ):
                 logging.info(
-                    'fuchsia_device(%s) SN: %s is in fastboot.' %
-                    (fuchsia_device.mdns_name, fuchsia_device.serial_number))
+                    "fuchsia_device(%s) SN: %s is in fastboot."
+                    % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
+                )
                 time_counter = FASTBOOT_TIMEOUT
         time_counter = time_counter + 1
         if time_counter == FASTBOOT_TIMEOUT:
             for fail_usb_device in usbinfo.usbinfo():
                 logging.debug(fail_usb_device)
             raise TimeoutError(
-                'fuchsia_device(%s) SN: %s '
-                'never went into fastboot' %
-                (fuchsia_device.mdns_name, fuchsia_device.serial_number))
+                "fuchsia_device(%s) SN: %s "
+                "never went into fastboot"
+                % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
+            )
         time.sleep(1)
 
     end_time = time.time() + WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC
@@ -182,8 +196,7 @@
         flash_process_found = False
         for proc in psutil.process_iter():
             if "bash" in proc.name() and "flash.sh" in proc.cmdline():
-                logging.info(
-                    "Waiting for existing flash.sh process to complete.")
+                logging.info("Waiting for existing flash.sh process to complete.")
                 time.sleep(PROCESS_CHECK_WAIT_TIME_SEC)
                 flash_process_found = True
         if not flash_process_found:
@@ -193,16 +206,19 @@
 def run_flash_script(fuchsia_device, flash_dir):
     try:
         flash_output = job.run(
-            f'bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}',
-            timeout=120)
+            f"bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}",
+            timeout=FLASH_TIMEOUT_SEC,
+        )
         logging.debug(flash_output.stderr)
     except job.TimeoutError as err:
         raise TimeoutError(err)
 
-    logging.info('Waiting %s seconds for device'
-                 ' to come back up after flashing.' % AFTER_FLASH_BOOT_TIME)
+    logging.info(
+        "Waiting %s seconds for device"
+        " to come back up after flashing." % AFTER_FLASH_BOOT_TIME
+    )
     time.sleep(AFTER_FLASH_BOOT_TIME)
-    logging.info('Updating device to new IP addresses.')
+    logging.info("Updating device to new IP addresses.")
     mdns_ip = None
     for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
         mdns_ip = get_fuchsia_mdns_ipv6_address(fuchsia_device.mdns_name)
@@ -211,38 +227,13 @@
         else:
             time.sleep(1)
     if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
-        logging.info('IP for fuchsia_device(%s) changed from %s to %s' %
-                     (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip))
+        logging.info(
+            "IP for fuchsia_device(%s) changed from %s to %s"
+            % (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip)
+        )
         fuchsia_device.ip = mdns_ip
         fuchsia_device.address = "http://[{}]:{}".format(
-            fuchsia_device.ip, fuchsia_device.sl4f_port)
+            fuchsia_device.ip, fuchsia_device.sl4f_port
+        )
     else:
-        raise ValueError('Invalid IP: %s after flashing.' %
-                         fuchsia_device.mdns_name)
-
-
-def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
-    """Wait for the host to start accepting connections on the port.
-
-    Some services take some time to start. Call this after launching the service
-    to avoid race conditions.
-
-    Args:
-        host: IP of the running service.
-        port: Port of the running service.
-        timeout_sec: Seconds to wait until raising TimeoutError
-
-    Raises:
-        TimeoutError: when timeout_sec has expired without a successful
-            connection to the service
-    """
-    timeout = time.perf_counter() + timeout_sec
-    while True:
-        try:
-            with socket.create_connection((host, port), timeout=timeout_sec):
-                return
-        except ConnectionRefusedError as e:
-            if time.perf_counter() > timeout:
-                raise TimeoutError(
-                    f'Waited over {timeout_sec}s for the service to start '
-                    f'accepting connections at {host}:{port}') from e
+        raise ValueError("Invalid IP: %s after flashing." % fuchsia_device.mdns_name)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
index 07ff2f8..54486d9 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
@@ -18,26 +18,26 @@
 
 
 class FuchsiaWlanApPolicyLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan_ap_policy")
 
-    def wlanStartAccessPoint(self, target_ssid, security_type, target_pwd,
-                             connectivity_mode, operating_band):
-        """ Start an Access Point.
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network. Possible inputs:
-                    	"none", "wep", "wpa", "wpa2", "wpa3"
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
-                    connectivity_mode: the connectivity mode to use. Possible inputs:
-                    	"local_only", "unrestricted"
-                    operating_band: The operating band to use. Possible inputs:
-                    	"any", "only_2_4_ghz", "only_5_ghz"
+    def wlanStartAccessPoint(
+        self, target_ssid, security_type, target_pwd, connectivity_mode, operating_band
+    ):
+        """Start an Access Point.
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network. Possible inputs:
+                "none", "wep", "wpa", "wpa2", "wpa3"
+            target_pwd: (optional) credential being saved with the network. No password
+                        is equivalent to empty string.
+            connectivity_mode: the connectivity mode to use. Possible inputs:
+                "local_only", "unrestricted"
+            operating_band: The operating band to use. Possible inputs:
+                "any", "only_2_4_ghz", "only_5_ghz"
 
-                Returns:
-                    boolean indicating if the action was successful
+        Returns:
+            boolean indicating if the action was successful
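+
+        Example (illustrative only; `ap_policy` is a placeholder instance of
+        this class and the argument values are placeholders):
+            ap_policy.wlanStartAccessPoint(
+                "example_ssid", "wpa2", "password", "local_only", "only_2_4_ghz"
+            )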
         """
 
         test_cmd = "wlan_ap_policy.start_access_point"
@@ -53,15 +53,15 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanStopAccessPoint(self, target_ssid, security_type, target_pwd=""):
-        """ Stops an active Access Point.
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
+        """Stops an active Access Point.
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: (optional) credential being saved with the network. No password
+                        is equivalent to empty string.
 
-                Returns:
-                    boolean indicating if the action was successful
+        Returns:
+            boolean indicating if the action was successful
         """
 
         test_cmd = "wlan_ap_policy.stop_access_point"
@@ -69,16 +69,16 @@
         test_args = {
             "target_ssid": target_ssid,
             "security_type": security_type.lower(),
-            "target_pwd": target_pwd
+            "target_pwd": target_pwd,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanStopAllAccessPoint(self):
-        """ Stops all Access Points
+        """Stops all Access Points
 
-                Returns:
-                    boolean indicating if the actions were successful
+        Returns:
+            boolean indicating if the actions were successful
         """
 
         test_cmd = "wlan_ap_policy.stop_all_access_points"
@@ -88,22 +88,22 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanSetNewListener(self):
-        """ Sets the update listener stream of the facade to a new stream so that updates will be
-            reset. Intended to be used between tests so that the behaviour of updates in a test is
-            independent from previous tests.
+        """Sets the update listener stream of the facade to a new stream so that updates will be
+        reset. Intended to be used between tests so that the behaviour of updates in a test is
+        independent from previous tests.
         """
         test_cmd = "wlan_ap_policy.set_new_update_listener"
 
         return self.send_command(test_cmd, {})
 
     def wlanGetUpdate(self, timeout=30):
-        """ Gets a list of AP state updates. This call will return with an update immediately the
-            first time the update listener is initialized by setting a new listener or by creating
-            a client controller before setting a new listener. Subsequent calls will hang until
-            there is an update.
-            Returns:
-                A list of AP state updated. If there is no error, the result is a list with a
-                structure that matches the FIDL AccessPointState struct given for updates.
+        """Gets a list of AP state updates. This call will return with an update immediately the
+        first time the update listener is initialized by setting a new listener or by creating
+        a client controller before setting a new listener. Subsequent calls will hang until
+        there is an update.
+        Returns:
+            A list of AP state updates. If there is no error, the result is a list with a
+            structure that matches the FIDL AccessPointState struct given for updates.
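+
+        Example (illustrative only; `ap_policy` is a placeholder instance of
+        this class):
+            ap_policy.wlanSetNewListener()
+            ap_policy.wlanStartAccessPoint(
+                "example_ssid", "none", "", "local_only", "any"
+            )
+            update = ap_policy.wlanGetUpdate()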
         """
         test_cmd = "wlan_ap_policy.get_update"
 
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
index d229049..a53698b 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
@@ -19,18 +19,17 @@
 
 
 class FuchsiaWlanDeprecatedConfigurationLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan_deprecated")
 
     def wlanSuggestAccessPointMacAddress(self, addr):
-        """ Suggests a mac address to soft AP interface, to support
+        """Suggests a mac address to soft AP interface, to support
         cast legacy behavior.
 
         Args:
             addr: string of mac address to suggest (e.g. '12:34:56:78:9a:bc')
         """
-        test_cmd = 'wlan_deprecated.suggest_ap_mac'
-        test_args = {'mac': addr}
+        test_cmd = "wlan_deprecated.suggest_ap_mac"
+        test_args = {"mac": addr}
 
         return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_lib.py
index c68fa11..9ed274a 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_lib.py
@@ -30,12 +30,11 @@
 
 
 class FuchsiaWlanLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan")
 
     def wlanStartScan(self):
-        """ Starts a wlan scan
+        """Starts a wlan scan
 
         Returns:
             scan results
@@ -45,7 +44,7 @@
         return self.send_command(test_cmd, {})
 
     def wlanScanForBSSInfo(self):
-        """ Scans and returns BSS info
+        """Scans and returns BSS info
 
         Returns:
             A dict mapping each seen SSID to a list of BSS Description IE
@@ -55,11 +54,8 @@
 
         return self.send_command(test_cmd, {})
 
-    def wlanConnectToNetwork(self,
-                             target_ssid,
-                             target_bss_desc,
-                             target_pwd=None):
-        """ Triggers a network connection
+    def wlanConnectToNetwork(self, target_ssid, target_bss_desc, target_pwd=None):
+        """Triggers a network connection
         Args:
             target_ssid: the network to attempt a connection to
             target_pwd: (optional) password for the target network
@@ -71,19 +67,19 @@
         test_args = {
             "target_ssid": target_ssid,
             "target_pwd": target_pwd,
-            "target_bss_desc": target_bss_desc
+            "target_bss_desc": target_bss_desc,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanDisconnect(self):
-        """ Disconnect any current wifi connections"""
+        """Disconnect any current wifi connections"""
         test_cmd = COMMAND_DISCONNECT
 
         return self.send_command(test_cmd, {})
 
     def wlanDestroyIface(self, iface_id):
-        """ Destroy WLAN interface by ID.
+        """Destroy WLAN interface by ID.
         Args:
             iface_id: the interface id.
 
@@ -96,7 +92,7 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanGetIfaceIdList(self):
-        """ Get a list if wlan interface IDs.
+        """Get a list if wlan interface IDs.
 
         Returns:
             Dictionary, service id if success, error if error.
@@ -106,7 +102,7 @@
         return self.send_command(test_cmd, {})
 
     def wlanPhyIdList(self):
-        """ Get a list if wlan phy IDs.
+        """Get a list if wlan phy IDs.
 
         Returns:
             List of IDs if success, error if error.
@@ -116,7 +112,7 @@
         return self.send_command(test_cmd, {})
 
     def wlanStatus(self, iface_id=None):
-        """ Request connection status
+        """Request connection status
 
         Args:
             iface_id: unsigned 16-bit int, the wlan interface id
@@ -129,12 +125,12 @@
         test_cmd = COMMAND_STATUS
         test_args = {}
         if iface_id:
-            test_args = {'iface_id': iface_id}
+            test_args = {"iface_id": iface_id}
 
         return self.send_command(test_cmd, test_args)
 
     def wlanGetCountry(self, phy_id):
-        """ Reads the currently configured country for `phy_id`.
+        """Reads the currently configured country for `phy_id`.
 
         Args:
             phy_id: unsigned 16-bit integer.
@@ -148,7 +144,7 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanGetDevPath(self, phy_id):
-        """ Queries the device path for `phy_id`.
+        """Queries the device path for `phy_id`.
 
         Args:
             phy_id: unsigned 16-bit integer.
@@ -162,7 +158,7 @@
         return self.send_command(test_cmd, test_args)
 
     def wlanQueryInterface(self, iface_id):
-        """ Retrieves interface info for given wlan iface id.
+        """Retrieves interface info for given wlan iface id.
 
         Args:
             iface_id: unsigned 16-bit int, the wlan interface id.
@@ -172,6 +168,6 @@
             and mac addr.
         """
         test_cmd = COMMAND_QUERY_IFACE
-        test_args = {'iface_id': iface_id}
+        test_args = {"iface_id": iface_id}
 
         return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
index 37e70fa..94701d7 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
+++ b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
@@ -35,79 +35,78 @@
 
 
 class FuchsiaWlanPolicyLib(BaseLib):
-
     def __init__(self, addr: str) -> None:
         super().__init__(addr, "wlan_policy")
 
     def wlanStartClientConnections(self):
-        """ Enables device to initiate connections to networks """
+        """Enables device to initiate connections to networks"""
 
         test_cmd = COMMAND_START_CLIENT_CONNECTIONS
 
         return self.send_command(test_cmd, {})
 
     def wlanStopClientConnections(self):
-        """ Disables device for initiating connections to networks """
+        """Disables device for initiating connections to networks"""
 
         test_cmd = COMMAND_STOP_CLIENT_CONNECTIONS
 
         return self.send_command(test_cmd, {})
 
     def wlanScanForNetworks(self):
-        """ Scans for networks that can be connected to
-                Returns:
-                    A list of network names and security types
-         """
+        """Scans for networks that can be connected to
+        Returns:
+            A list of network names and security types
+        """
 
         test_cmd = COMMAND_SCAN_FOR_NETWORKS
 
         return self.send_command(test_cmd, {})
 
     def wlanSaveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """ Saveds a network to the device for future connections
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
+        """Saveds a network to the device for future connections
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: (optional) credential being saved with the network. No password
+                        is equivalent to empty string.
 
-                Returns:
-                    boolean indicating if the connection was successful
+        Returns:
+            boolean indicating if the save was successful
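+
+        Example (illustrative only; `policy` is a placeholder instance of this
+        class):
+            policy.wlanSaveNetwork("example_ssid", "wpa2", target_pwd="password")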
         """
         if not target_pwd:
-            target_pwd = ''
+            target_pwd = ""
         test_cmd = COMMAND_SAVE_NETWORK
         test_args = {
             "target_ssid": target_ssid,
             "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd
+            "target_pwd": target_pwd,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanRemoveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """ Removes or "forgets" a network from saved networks
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential of the network to remove. No password and
-                                empty string are equivalent.
+        """Removes or "forgets" a network from saved networks
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: (optional) credential of the network to remove. No password and
+                        empty string are equivalent.
         """
         if not target_pwd:
-            target_pwd = ''
+            target_pwd = ""
         test_cmd = COMMAND_REMOVE_NETWORK
         test_args = {
             "target_ssid": target_ssid,
             "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd
+            "target_pwd": target_pwd,
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanRemoveAllNetworks(self):
-        """ Removes or "forgets" all networks from saved networks
-                Returns:
-                    A boolean indicating if the action was successful
+        """Removes or "forgets" all networks from saved networks
+        Returns:
+            A boolean indicating if the action was successful
         """
 
         test_cmd = COMMAND_REMOVE_ALL_NETWORKS
@@ -115,10 +114,10 @@
         return self.send_command(test_cmd, {})
 
     def wlanGetSavedNetworks(self):
-        """ Gets networks saved on device. Any PSK of a saved network will be
-            lower case regardless of how it was saved.
-                Returns:
-                    A list of saved network names and security protocols
+        """Gets networks saved on device. Any PSK of a saved network will be
+        lower case regardless of how it was saved.
+        Returns:
+            A list of saved network names and security protocols
         """
 
         test_cmd = COMMAND_GET_SAVED_NETWORKS
@@ -126,57 +125,57 @@
         return self.send_command(test_cmd, {})
 
     def wlanConnect(self, target_ssid, security_type):
-        """ Triggers connection to a network
-                Args:
-                    target_ssid: the network to attempt a connection to. Must have been previously
-                                 saved in order for a successful connection to happen.
-                    security_type: the security protocol of the network
+        """Triggers connection to a network
+        Args:
+            target_ssid: the network to attempt a connection to. Must have been previously
+                         saved in order for a successful connection to happen.
+            security_type: the security protocol of the network
 
-            Returns:
-                    boolean indicating if the connection was successful
+        Returns:
+            boolean indicating if the connection was successful
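+
+        Example (illustrative only; `policy` is a placeholder instance of this
+        class, and the network is saved first):
+            policy.wlanSaveNetwork("example_ssid", "wpa2", "password")
+            policy.wlanConnect("example_ssid", "wpa2")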
         """
 
         test_cmd = COMMAND_CONNECT
         test_args = {
             "target_ssid": target_ssid,
-            "security_type": str(security_type).lower()
+            "security_type": str(security_type).lower(),
         }
 
         return self.send_command(test_cmd, test_args)
 
     def wlanCreateClientController(self):
-        """ Initializes the client controller of the facade that is used to make Client Controller
-            API calls
+        """Initializes the client controller of the facade that is used to make Client Controller
+        API calls
         """
         test_cmd = COMMAND_CREATE_CLIENT_CONTROLLER
 
         return self.send_command(test_cmd, {})
 
     def wlanSetNewListener(self):
-        """ Sets the update listener stream of the facade to a new stream so that updates will be
-            reset. Intended to be used between tests so that the behaviour of updates in a test is
-            independent from previous tests.
+        """Sets the update listener stream of the facade to a new stream so that updates will be
+        reset. Intended to be used between tests so that the behaviour of updates in a test is
+        independent from previous tests.
         """
         test_cmd = COMMAND_SET_NEW_LISTENER
 
         return self.send_command(test_cmd, {})
 
     def wlanRemoveAllNetworks(self):
-        """ Deletes all saved networks on the device. Relies directly on the get_saved_networks and
-            remove_network commands
+        """Deletes all saved networks on the device. Relies directly on the get_saved_networks and
+        remove_network commands
         """
         test_cmd = COMMAND_REMOVE_ALL_NETWORKS
 
         return self.send_command(test_cmd, {})
 
     def wlanGetUpdate(self, timeout=30):
-        """ Gets one client listener update. This call will return with an update immediately the
-            first time the update listener is initialized by setting a new listener or by creating
-            a client controller before setting a new listener. Subsequent calls will hang until
-            there is an update.
-            Returns:
-                An update of connection status. If there is no error, the result is a dict with a
-                structure that matches the FIDL ClientStateSummary struct given for updates.
+        """Gets one client listener update. This call will return with an update immediately the
+        first time the update listener is initialized by setting a new listener or by creating
+        a client controller before setting a new listener. Subsequent calls will hang until
+        there is an update.
+        Returns:
+            An update of connection status. If there is no error, the result is a dict with a
+            structure that matches the FIDL ClientStateSummary struct given for updates.
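+
+        Example (illustrative only; `policy` is a placeholder instance of this
+        class):
+            policy.wlanSetNewListener()
+            update = policy.wlanGetUpdate(timeout=30)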
         """
         test_cmd = COMMAND_GET_UPDATE
 
diff --git a/src/antlion/controllers/gnss_lib/GnssSimulator.py b/src/antlion/controllers/gnss_lib/GnssSimulator.py
deleted file mode 100644
index 69cad04..0000000
--- a/src/antlion/controllers/gnss_lib/GnssSimulator.py
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python module for General abstract GNSS Simulator.
-@author: Clay Liao (jianhsiungliao@)
-"""
-from time import sleep
-from antlion.controllers.spectracom_lib import gsg6
-from antlion.controllers.spirent_lib import gss7000
-from antlion import logger
-from antlion.utils import ping
-from antlion.libs.proc import job
-
-
-class AbstractGnssSimulator:
-    """General abstract GNSS Simulator"""
-
-    def __init__(self, simulator, ip_addr, ip_port, ip_port_ctrl=7717):
-        """Init AbstractGnssSimulator
-
-        Args:
-            simulator: GNSS simulator name,
-                Type, str
-                Option 'gss7000/gsg6'
-            ip_addr: IP Address.
-                Type, str
-            ip_port: TCPIP Port,
-                Type, str
-            ip_port_ctrl: TCPIP port,
-                Type, int
-                Default, 7717
-        """
-        self.simulator_name = str(simulator).lower()
-        self.ip_addr = ip_addr
-        self.ip_port = ip_port
-        self.ip_port_ctrl = ip_port_ctrl
-        self._logger = logger.create_tagged_trace_logger(
-            '%s %s:%s' % (simulator, self.ip_addr, self.ip_port))
-        if self.simulator_name == 'gsg6':
-            self._logger.info('GNSS simulator is GSG6')
-            self.simulator = gsg6.GSG6(self.ip_addr, self.ip_port)
-        elif self.simulator_name == 'gss7000':
-            self._logger.info('GNSS simulator is GSS7000')
-            self.simulator = gss7000.GSS7000(self.ip_addr, self.ip_port,
-                                             self.ip_port_ctrl)
-        else:
-            self._logger.error('No matched GNSS simulator')
-            raise AttributeError(
-                'The GNSS simulator in config file is {} which is not supported.'
-                .format(self.simulator_name))
-
-    def connect(self):
-        """Connect to GNSS Simulator"""
-        self._logger.debug('Connect to GNSS Simulator {}'.format(
-            self.simulator_name.upper()))
-        self.simulator.connect()
-
-    def close(self):
-        """Disconnect from GNSS Simulator"""
-        self._logger.debug('Disconnect from GNSS Simulator {}'.format(
-            self.simulator_name.upper()))
-        self.simulator.close()
-
-    def start_scenario(self, scenario=''):
-        """Start the running scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        self._logger.info('Start GNSS Scenario {}'.format(scenario))
-        self.simulator.start_scenario(scenario)
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-        self._logger.debug('Stop playing scenario')
-        self.simulator.stop_scenario()
-
-    def set_power(self, power_level=-130):
-        """Set scenario power level.
-        Args:
-            power_level: target power level in dBm for gsg6 or gss7000,
-                gsg6 power_level range is [-160, -65],
-                gss7000 power_level range is [-170, -115]
-                Type, float,
-        """
-        self.simulator.set_power(power_level)
-
-    def set_power_offset(self, gss7000_ant=1, pwr_offset=0):
-        """Set scenario power level offset based on reference level.
-           The default reference level is -130dBm for GPS L1.
-        Args:
-            ant: target gss7000 RF port,
-                Type, int
-            pwr_offset: target power offset in dB,
-                Type, float
-        """
-        if self.simulator_name == 'gsg6':
-            power_level = -130 + pwr_offset
-            self.simulator.set_power(power_level)
-        elif self.simulator_name == 'gss7000':
-            self.simulator.set_power_offset(gss7000_ant, pwr_offset)
-        else:
-            self._logger.error('No GNSS simulator is available')
-
-    def set_scenario_power(self,
-                           power_level,
-                           sat_id='',
-                           sat_system='',
-                           freq_band=''):
-        """Set dynamic power for the running scenario.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-            sat_id: set power level for specific satellite identifiers
-                Type, str.
-                Option
-                    For GSG-6: 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                    where xx is satellite identifiers number
-                    e.g.: G10
-                    For GSS7000: Provide SVID.
-                Default, '', assumed All.
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-                Default, '', assumed All.
-            freq_band: Frequency band to set the power level
-                Type, str
-                Default, '', assumed to be L1.
-         Raises:
-            RuntimeError: raise when instrument does not support this function.
-        """
-        self.simulator.set_scenario_power(power_level=power_level,
-                                          sat_id=sat_id,
-                                          sat_system=sat_system,
-                                          freq_band=freq_band)
-
-    def toggle_scenario_power(self,
-                              toggle_onoff='ON',
-                              sat_id='',
-                              sat_system=''):
-        """Toggle ON OFF scenario.
-
-        Args:
-            toggle_onoff: turn on or off the satellites
-                Type, str. Option ON/OFF
-                Default, 'ON'
-            sat_id: satellite identifiers
-                Type, str.
-                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers no.
-                e.g.: G10
-            sat_system: to toggle On/OFF for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL'
-        """
-        # TODO: [b/208719212] Currently only support GSG-6. Will implement GSS7000 feature.
-        if self.simulator_name == 'gsg6':
-            self.simulator.toggle_scenario_power(toggle_onoff=toggle_onoff,
-                                                 sat_id=sat_id,
-                                                 sat_system=sat_system)
-        else:
-            raise RuntimeError('{} does not support this function'.format(
-                self.simulator_name))
-
-    def ping_inst(self, retry=3, wait=1):
-        """Ping IP of instrument to check if the connection is stable.
-        Args:
-            retry: Retry times.
-                Type, int.
-                Default, 3.
-            wait: Wait time between each ping command when ping fail is met.
-                Type, int.
-                Default, 1.
-        Return:
-            True/False of ping result.
-        """
-        for i in range(retry):
-            ret = ping(job, self.ip_addr)
-            self._logger.debug(f'Ping return results: {ret}')
-            if ret.get('packet_loss') == '0':
-                return True
-            self._logger.warning(f'Fail to ping GNSS Simulator: {i+1}')
-            sleep(wait)
-        return False
diff --git a/src/antlion/controllers/gnss_lib/__init__.py b/src/antlion/controllers/gnss_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/gnss_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/iperf_client.py b/src/antlion/controllers/iperf_client.py
index c4d8e1d..9ad6efc 100644
--- a/src/antlion/controllers/iperf_client.py
+++ b/src/antlion/controllers/iperf_client.py
@@ -30,8 +30,8 @@
 from antlion.controllers.utils_lib.ssh import settings
 from antlion.libs.proc import job
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'IPerfClient'
-ACTS_CONTROLLER_REFERENCE_NAME = 'iperf_clients'
+MOBLY_CONTROLLER_CONFIG_NAME = "IPerfClient"
+ACTS_CONTROLLER_REFERENCE_NAME = "iperf_clients"
 
 
 class IPerfError(Exception):
@@ -51,14 +51,18 @@
     """
     results = []
     for c in configs:
-        if type(c) is dict and 'AndroidDevice' in c:
+        if type(c) is dict and "AndroidDevice" in c:
             results.append(
-                IPerfClientOverAdb(c['AndroidDevice'],
-                                   test_interface=c.get('test_interface')))
-        elif type(c) is dict and 'ssh_config' in c:
+                IPerfClientOverAdb(
+                    c["AndroidDevice"], test_interface=c.get("test_interface")
+                )
+            )
+        elif type(c) is dict and "ssh_config" in c:
             results.append(
-                IPerfClientOverSsh(c['ssh_config'],
-                                   test_interface=c.get('test_interface')))
+                IPerfClientOverSsh(
+                    c["ssh_config"], test_interface=c.get("test_interface")
+                )
+            )
         else:
             results.append(IPerfClient())
     return results
@@ -85,6 +89,7 @@
     multiple IPerfClients from writing results to the same file, as well
     as providing the interface for IPerfClient objects.
     """
+
     # Keeps track of the number of IPerfClient logs to prevent file name
     # collisions.
     __log_file_counter = 0
@@ -92,7 +97,7 @@
     __log_file_lock = threading.Lock()
 
     @staticmethod
-    def _get_full_file_path(tag=''):
+    def _get_full_file_path(tag=""):
         """Returns the full file path for the IPerfClient log file.
 
         Note: If the directory for the file path does not exist, it will be
@@ -102,14 +107,16 @@
             tag: The tag passed in to the server run.
         """
         current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'iperf_client_files')
+        full_out_dir = os.path.join(
+            current_context.get_full_output_path(), "iperf_client_files"
+        )
 
         with IPerfClientBase.__log_file_lock:
             os.makedirs(full_out_dir, exist_ok=True)
-            tags = ['IPerfClient', tag, IPerfClientBase.__log_file_counter]
-            out_file_name = '%s.log' % (','.join(
-                [str(x) for x in tags if x != '' and x is not None]))
+            tags = ["IPerfClient", tag, IPerfClientBase.__log_file_counter]
+            out_file_name = "%s.log" % (
+                ",".join([str(x) for x in tags if x != "" and x is not None])
+            )
             IPerfClientBase.__log_file_counter += 1
 
         return os.path.join(full_out_dir, out_file_name)
@@ -129,7 +136,7 @@
         Returns:
             full_out_path: iperf result path.
         """
-        raise NotImplementedError('start() must be implemented.')
+        raise NotImplementedError("start() must be implemented.")
 
 
 class IPerfClient(IPerfClientBase):
@@ -151,15 +158,16 @@
             full_out_path: iperf result path.
         """
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_cmd = [str(iperf_binary), '-c', ip] + iperf_args.split(' ')
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_cmd = [str(iperf_binary), "-c", ip] + iperf_args.split(" ")
         full_out_path = self._get_full_file_path(tag)
 
-        with open(full_out_path, 'w') as out_file:
+        with open(full_out_path, "w") as out_file:
             subprocess.call(iperf_cmd, stdout=out_file)
 
         return full_out_path
@@ -168,17 +176,22 @@
 class IPerfClientOverSsh(IPerfClientBase):
     """Class that handles iperf3 client operations on remote machines."""
 
-    def __init__(self,
-                 ssh_config: str,
-                 test_interface: str = None,
-                 ssh_provider: SSHProvider = None):
+    def __init__(
+        self,
+        ssh_config: str,
+        test_interface: str = None,
+        ssh_provider: SSHProvider = None,
+    ):
         self._ssh_provider = ssh_provider
         if not self._ssh_provider:
             self._ssh_settings = settings.from_config(ssh_config)
-            if not (utils.is_valid_ipv4_address(self._ssh_settings.hostname) or
-                    utils.is_valid_ipv6_address(self._ssh_settings.hostname)):
+            if not (
+                utils.is_valid_ipv4_address(self._ssh_settings.hostname)
+                or utils.is_valid_ipv6_address(self._ssh_settings.hostname)
+            ):
                 mdns_ip = utils.get_fuchsia_mdns_ipv6_address(
-                    self._ssh_settings.hostname)
+                    self._ssh_settings.hostname
+                )
                 if mdns_ip:
                     self._ssh_settings.hostname = mdns_ip
         self._ssh_session = None
@@ -202,30 +215,30 @@
             full_out_path: iperf result path.
         """
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_cmd = '{} -c {} {}'.format(iperf_binary, ip, iperf_args)
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args)
         full_out_path = self._get_full_file_path(tag)
 
         try:
             self.start_ssh()
             if self._ssh_provider:
-                iperf_process = self._ssh_provider.run(iperf_cmd,
-                                                       timeout_sec=timeout)
+                iperf_process = self._ssh_provider.run(iperf_cmd, timeout_sec=timeout)
             else:
-                iperf_process = self._ssh_session.run(iperf_cmd,
-                                                      timeout=timeout)
+                iperf_process = self._ssh_session.run(iperf_cmd, timeout=timeout)
             iperf_output = iperf_process.stdout
-            with open(full_out_path, 'w') as out_file:
+            with open(full_out_path, "w") as out_file:
                 out_file.write(iperf_output)
         except socket.timeout:
-            raise TimeoutError('Socket timeout. Timed out waiting for iperf '
-                               'client to finish.')
+            raise TimeoutError(
+                "Socket timeout. Timed out waiting for iperf " "client to finish."
+            )
         except Exception as err:
-            logging.exception('iperf run failed: {}'.format(err))
+            logging.exception("iperf run failed: {}".format(err))
 
         return full_out_path
 
@@ -269,7 +282,8 @@
             return self._android_device_or_serial
         else:
             return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial]
+                self._android_device_or_serial
+            ]
 
     def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
         """Starts iperf client, and waits for completion.
@@ -286,26 +300,25 @@
         Returns:
             The iperf result file path.
         """
-        clean_out = ''
+        clean_out = ""
         try:
             if not iperf_binary:
-                logging.debug('No iperf3 binary specified.  '
-                              'Assuming iperf3 is in the path.')
-                iperf_binary = 'iperf3'
+                logging.debug(
+                    "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+                )
+                iperf_binary = "iperf3"
             else:
-                logging.debug('Using iperf3 binary located at %s' %
-                              iperf_binary)
-            iperf_cmd = '{} -c {} {}'.format(iperf_binary, ip, iperf_args)
-            out = self._android_device.adb.shell(str(iperf_cmd),
-                                                 timeout=timeout)
-            clean_out = out.split('\n')
-            if 'error' in clean_out[0].lower():
+                logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+            iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args)
+            out = self._android_device.adb.shell(str(iperf_cmd), timeout=timeout)
+            clean_out = out.split("\n")
+            if "error" in clean_out[0].lower():
                 raise IPerfError(clean_out)
         except (job.TimeoutError, AdbCommandError):
-            logging.warning('TimeoutError: Iperf measurement failed.')
+            logging.warning("TimeoutError: Iperf measurement failed.")
 
         full_out_path = self._get_full_file_path(tag)
-        with open(full_out_path, 'w') as out_file:
-            out_file.write('\n'.join(clean_out))
+        with open(full_out_path, "w") as out_file:
+            out_file.write("\n".join(clean_out))
 
         return full_out_path
diff --git a/src/antlion/controllers/iperf_server.py b/src/antlion/controllers/iperf_server.py
index b1311ff..20dcfbf 100755
--- a/src/antlion/controllers/iperf_server.py
+++ b/src/antlion/controllers/iperf_server.py
@@ -35,8 +35,8 @@
 from antlion.event.event import TestClassEndEvent
 from antlion.libs.proc import job
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'IPerfServer'
-ACTS_CONTROLLER_REFERENCE_NAME = 'iperf_servers'
+MOBLY_CONTROLLER_CONFIG_NAME = "IPerfServer"
+ACTS_CONTROLLER_REFERENCE_NAME = "iperf_servers"
 KILOBITS = 1024
 MEGABITS = KILOBITS * 1024
 GIGABITS = MEGABITS * 1024
@@ -44,7 +44,7 @@
 
 
 def create(configs):
-    """ Factory method for iperf servers.
+    """Factory method for iperf servers.
 
     The function creates iperf servers based on at least one config.
     If configs only specify a port number, a regular local IPerfServer object
@@ -58,18 +58,22 @@
     for c in configs:
         if type(c) in (str, int) and str(c).isdigit():
             results.append(IPerfServer(int(c)))
-        elif type(c) is dict and 'AndroidDevice' in c and 'port' in c:
-            results.append(IPerfServerOverAdb(c['AndroidDevice'], c['port']))
-        elif type(c) is dict and 'ssh_config' in c and 'port' in c:
+        elif type(c) is dict and "AndroidDevice" in c and "port" in c:
+            results.append(IPerfServerOverAdb(c["AndroidDevice"], c["port"]))
+        elif type(c) is dict and "ssh_config" in c and "port" in c:
             results.append(
-                IPerfServerOverSsh(c['ssh_config'],
-                                   c['port'],
-                                   test_interface=c.get('test_interface'),
-                                   use_killall=c.get('use_killall')))
+                IPerfServerOverSsh(
+                    settings.from_config(c["ssh_config"]),
+                    c["port"],
+                    test_interface=c.get("test_interface"),
+                    use_killall=c.get("use_killall"),
+                )
+            )
         else:
             raise ValueError(
-                'Config entry %s in %s is not a valid IPerfServer '
-                'config.' % (repr(c), configs))
+                "Config entry %s in %s is not a valid IPerfServer "
+                "config." % (repr(c), configs)
+            )
     return results
 
 
@@ -87,12 +91,11 @@
         try:
             iperf_server.stop()
         except Exception:
-            logging.exception('Unable to properly clean up %s.' % iperf_server)
+            logging.exception("Unable to properly clean up %s." % iperf_server)
 
 
 class IPerfResult(object):
-
-    def __init__(self, result_path, reporting_speed_units='Mbytes'):
+    def __init__(self, result_path, reporting_speed_units="Mbytes"):
         """Loads iperf result from file.
 
         Loads iperf result from JSON formatted server log. File can be accessed
@@ -106,20 +109,19 @@
             self.result = json.loads(result_path)
         else:
             try:
-                with open(result_path, 'r') as f:
+                with open(result_path, "r") as f:
                     iperf_output = f.readlines()
-                    if '}\n' in iperf_output:
-                        iperf_output = iperf_output[:iperf_output.index('}\n'
-                                                                        ) + 1]
-                    iperf_string = ''.join(iperf_output)
-                    iperf_string = iperf_string.replace('nan', '0')
+                    if "}\n" in iperf_output:
+                        iperf_output = iperf_output[: iperf_output.index("}\n") + 1]
+                    iperf_string = "".join(iperf_output)
+                    iperf_string = iperf_string.replace("nan", "0")
                     self.result = json.loads(iperf_string)
             except ValueError:
-                with open(result_path, 'r') as f:
+                with open(result_path, "r") as f:
                     # Possibly a result from interrupted iperf run,
                     # skip first line and try again.
                     lines = f.readlines()[1:]
-                    self.result = json.loads(''.join(lines))
+                    self.result = json.loads("".join(lines))
 
     def _has_data(self):
         """Checks if the iperf result has valid throughput data.
@@ -127,8 +129,9 @@
         Returns:
             True if the result contains throughput data. False otherwise.
         """
-        return ('end' in self.result) and ('sum_received' in self.result['end']
-                                           or 'sum' in self.result['end'])
+        return ("end" in self.result) and (
+            "sum_received" in self.result["end"] or "sum" in self.result["end"]
+        )
 
     def _get_reporting_speed(self, network_speed_in_bits_per_second):
         """Sets the units for the network speed reporting based on how the
@@ -146,13 +149,13 @@
             The value of the throughput in the appropriate units.
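+
+        Worked example (illustrative, assuming BITS_IN_BYTE == 8): with
+        reporting_speed_units="Mbytes", speed_divisor is 8 * 1024 * 1024, so
+        83886080 bits per second is reported as 10.0.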
         """
         speed_divisor = 1
-        if self.reporting_speed_units[1:].lower() == 'bytes':
+        if self.reporting_speed_units[1:].lower() == "bytes":
             speed_divisor = speed_divisor * BITS_IN_BYTE
-        if self.reporting_speed_units[0:1].lower() == 'k':
+        if self.reporting_speed_units[0:1].lower() == "k":
             speed_divisor = speed_divisor * KILOBITS
-        if self.reporting_speed_units[0:1].lower() == 'm':
+        if self.reporting_speed_units[0:1].lower() == "m":
             speed_divisor = speed_divisor * MEGABITS
-        if self.reporting_speed_units[0:1].lower() == 'g':
+        if self.reporting_speed_units[0:1].lower() == "g":
             speed_divisor = speed_divisor * GIGABITS
         return network_speed_in_bits_per_second / speed_divisor
 
@@ -162,7 +165,7 @@
 
     @property
     def error(self):
-        return self.result.get('error', None)
+        return self.result.get("error", None)
 
     @property
     def avg_rate(self):
@@ -174,9 +177,9 @@
         quality of the link. If the result is not from a success run, this
         property is None.
         """
-        if not self._has_data() or 'sum' not in self.result['end']:
+        if not self._has_data() or "sum" not in self.result["end"]:
             return None
-        bps = self.result['end']['sum']['bits_per_second']
+        bps = self.result["end"]["sum"]["bits_per_second"]
         return self._get_reporting_speed(bps)
 
     @property
@@ -186,9 +189,9 @@
         This data may not exist if iperf was interrupted. If the result is not
         from a success run, this property is None.
         """
-        if not self._has_data() or 'sum_received' not in self.result['end']:
+        if not self._has_data() or "sum_received" not in self.result["end"]:
             return None
-        bps = self.result['end']['sum_received']['bits_per_second']
+        bps = self.result["end"]["sum_received"]["bits_per_second"]
         return self._get_reporting_speed(bps)
 
     @property
@@ -198,9 +201,9 @@
         This data may not exist if iperf was interrupted. If the result is not
         from a success run, this property is None.
         """
-        if not self._has_data() or 'sum_sent' not in self.result['end']:
+        if not self._has_data() or "sum_sent" not in self.result["end"]:
             return None
-        bps = self.result['end']['sum_sent']['bits_per_second']
+        bps = self.result["end"]["sum_sent"]["bits_per_second"]
         return self._get_reporting_speed(bps)
 
     @property
@@ -213,8 +216,8 @@
         if not self._has_data():
             return None
         intervals = [
-            self._get_reporting_speed(interval['sum']['bits_per_second'])
-            for interval in self.result['intervals']
+            self._get_reporting_speed(interval["sum"]["bits_per_second"])
+            for interval in self.result["intervals"]
         ]
         return intervals
 
@@ -245,13 +248,10 @@
         """
         if not self._has_data():
             return None
-        instantaneous_rates = self.instantaneous_rates[
-            iperf_ignored_interval:-1]
+        instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:-1]
         avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
-        sqd_deviations = ([(rate - avg_rate)**2
-                           for rate in instantaneous_rates])
-        std_dev = math.sqrt(
-            math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
+        sqd_deviations = [(rate - avg_rate) ** 2 for rate in instantaneous_rates]
+        std_dev = math.sqrt(math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
         return std_dev
 
 
@@ -272,13 +272,13 @@
 
     @property
     def port(self):
-        raise NotImplementedError('port must be specified.')
+        raise NotImplementedError("port must be specified.")
 
     @property
     def started(self):
-        raise NotImplementedError('started must be specified.')
+        raise NotImplementedError("started must be specified.")
 
-    def start(self, extra_args='', tag=''):
+    def start(self, extra_args="", tag=""):
         """Starts an iperf3 server.
 
         Args:
@@ -287,7 +287,7 @@
             tag: Appended to log file name to identify logs from different
                 iperf runs.
         """
-        raise NotImplementedError('start() must be specified.')
+        raise NotImplementedError("start() must be specified.")
 
     def stop(self):
         """Stops the iperf server.
@@ -295,7 +295,7 @@
         Returns:
             The name of the log file generated from the terminated session.
         """
-        raise NotImplementedError('stop() must be specified.')
+        raise NotImplementedError("stop() must be specified.")
 
     def _get_full_file_path(self, tag=None):
         """Returns the full file path for the IPerfServer log file.
@@ -310,8 +310,9 @@
 
         with IPerfServerBase.__log_file_lock:
             tags = [tag, IPerfServerBase.__log_file_counter]
-            out_file_name = 'IPerfServer,%s.log' % (','.join(
-                [str(x) for x in tags if x != '' and x is not None]))
+            out_file_name = "IPerfServer,%s.log" % (
+                ",".join([str(x) for x in tags if x != "" and x is not None])
+            )
             IPerfServerBase.__log_file_counter += 1
 
         file_path = os.path.join(out_dir, out_file_name)
@@ -321,8 +322,9 @@
     @property
     def log_path(self):
         current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'IPerfServer%s' % self.port)
+        full_out_dir = os.path.join(
+            current_context.get_full_output_path(), "IPerfServer%s" % self.port
+        )
 
         # Ensure the directory exists.
         os.makedirs(full_out_dir, exist_ok=True)
@@ -332,14 +334,14 @@
 
 def _get_port_from_ss_output(ss_output, pid):
     pid = str(pid)
-    lines = ss_output.split('\n')
+    lines = ss_output.split("\n")
     for line in lines:
         if pid in line:
             # Expected format:
             # tcp LISTEN  0 5 *:<PORT>  *:* users:(("cmd",pid=<PID>,fd=3))
-            return line.split()[4].split(':')[-1]
+            return line.split()[4].split(":")[-1]
     else:
-        raise ProcessLookupError('Could not find started iperf3 process.')
+        raise ProcessLookupError("Could not find started iperf3 process.")
 
 
 class IPerfServer(IPerfServerBase):
@@ -360,7 +362,7 @@
     def started(self):
         return self._iperf_process is not None
 
-    def start(self, extra_args='', tag=''):
+    def start(self, extra_args="", tag=""):
         """Starts iperf server on local machine.
 
         Args:
@@ -375,28 +377,30 @@
         self._current_log_file = self._get_full_file_path(tag)
 
         # Run an iperf3 server on the hinted port with JSON output.
-        command = ['iperf3', '-s', '-p', str(self._hinted_port), '-J']
+        command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"]
 
         command.extend(shlex.split(extra_args))
 
         if self._last_opened_file:
             self._last_opened_file.close()
-        self._last_opened_file = open(self._current_log_file, 'w')
-        self._iperf_process = subprocess.Popen(command,
-                                               stdout=self._last_opened_file,
-                                               stderr=subprocess.DEVNULL)
+        self._last_opened_file = open(self._current_log_file, "w")
+        self._iperf_process = subprocess.Popen(
+            command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL
+        )
         for attempts_left in reversed(range(3)):
             try:
                 self._port = int(
                     _get_port_from_ss_output(
-                        job.run('ss -l -p -n | grep iperf').stdout,
-                        self._iperf_process.pid))
+                        job.run("ss -l -p -n | grep iperf").stdout,
+                        self._iperf_process.pid,
+                    )
+                )
                 break
             except ProcessLookupError:
                 if attempts_left == 0:
                     raise
-                logging.debug('iperf3 process not started yet.')
-                time.sleep(.01)
+                logging.debug("iperf3 process not started yet.")
+                time.sleep(0.01)
 
     def stop(self):
         """Stops the iperf server.
@@ -423,28 +427,28 @@
 class IPerfServerOverSsh(IPerfServerBase):
     """Class that handles iperf3 operations on remote machines."""
 
-    def __init__(self,
-                 ssh_config,
-                 port,
-                 test_interface=None,
-                 use_killall=False):
+    def __init__(self, ssh_settings, port, test_interface=None, use_killall=False):
         super().__init__(port)
-        self.ssh_settings = settings.from_config(ssh_config)
+        self.ssh_settings = ssh_settings
         self.log = acts_logger.create_tagged_trace_logger(
-            f'IPerfServer | {self.ssh_settings.hostname}')
+            f"IPerfServer | {self.ssh_settings.hostname}"
+        )
         self._ssh_session = None
         self.start_ssh()
 
         self._iperf_pid = None
         self._current_tag = None
         self.hostname = self.ssh_settings.hostname
-        self._use_killall = str(use_killall).lower() == 'true'
+        self._use_killall = str(use_killall).lower() == "true"
         try:
             # A test interface can only be found if an ip address is specified.
             # A fully qualified hostname will return None for the
             # test_interface.
-            self.test_interface = test_interface if test_interface else utils.get_interface_based_on_ip(
-                self._ssh_session, self.hostname)
+            self.test_interface = (
+                test_interface
+                if test_interface
+                else utils.get_interface_based_on_ip(self._ssh_session, self.hostname)
+            )
         except Exception as e:
             self.log.warning(e)
             self.test_interface = None
@@ -458,7 +462,7 @@
         return self._iperf_pid is not None
 
     def _get_remote_log_path(self):
-        return '/tmp/iperf_server_port%s.log' % self.port
+        return "/tmp/iperf_server_port%s.log" % self.port
 
     def get_interface_ip_addresses(self, interface):
         """Gets all of the ip addresses, ipv4 and ipv6, associated with a
@@ -485,28 +489,29 @@
             self.start_ssh()
         utils.renew_linux_ip_address(self._ssh_session, self.test_interface)
 
-    def get_addr(self, addr_type='ipv4_private', timeout_sec=None):
+    def get_addr(self, addr_type="ipv4_private", timeout_sec=None):
         """Wait until a type of IP address on the test interface is available
         then return it.
         """
         if not self._ssh_session:
             self.start_ssh()
-        return utils.get_addr(self._ssh_session, self.test_interface,
-                              addr_type, timeout_sec)
+        return utils.get_addr(
+            self._ssh_session, self.test_interface, addr_type, timeout_sec
+        )
 
     def _cleanup_iperf_port(self):
         """Checks and kills zombie iperf servers occupying intended port."""
-        iperf_check_cmd = ('netstat -tulpn | grep LISTEN | grep iperf3'
-                           ' | grep :{}').format(self.port)
-        iperf_check = self._ssh_session.run(iperf_check_cmd,
-                                            ignore_status=True)
+        iperf_check_cmd = (
+            "netstat -tulpn | grep LISTEN | grep iperf3" " | grep :{}"
+        ).format(self.port)
+        iperf_check = self._ssh_session.run(iperf_check_cmd, ignore_status=True)
         iperf_check = iperf_check.stdout
         if iperf_check:
-            logging.debug('Killing zombie server on port {}'.format(self.port))
-            iperf_pid = iperf_check.split(' ')[-1].split('/')[0]
-            self._ssh_session.run('kill -9 {}'.format(str(iperf_pid)))
+            logging.debug("Killing zombie server on port {}".format(self.port))
+            iperf_pid = iperf_check.split(" ")[-1].split("/")[0]
+            self._ssh_session.run("kill -9 {}".format(str(iperf_pid)))
 
-    def start(self, extra_args='', tag='', iperf_binary=None):
+    def start(self, extra_args="", tag="", iperf_binary=None):
         """Starts iperf server on specified machine and port.
 
         Args:
@@ -524,17 +529,19 @@
             self.start_ssh()
         self._cleanup_iperf_port()
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified. Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_command = '{} -s -J -p {}'.format(iperf_binary, self.port)
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port)
 
-        cmd = '{cmd} {extra_flags} > {log_file}'.format(
+        cmd = "{cmd} {extra_flags} > {log_file}".format(
             cmd=iperf_command,
             extra_flags=extra_args,
-            log_file=self._get_remote_log_path())
+            log_file=self._get_remote_log_path(),
+        )
 
         job_result = self._ssh_session.run_async(cmd)
         self._iperf_pid = job_result.stdout
@@ -550,20 +557,19 @@
             return
 
         if self._use_killall:
-            self._ssh_session.run('killall iperf3', ignore_status=True)
+            self._ssh_session.run("killall iperf3", ignore_status=True)
         else:
-            self._ssh_session.run_async('kill -9 {}'.format(
-                str(self._iperf_pid)))
+            self._ssh_session.run_async("kill -9 {}".format(str(self._iperf_pid)))
 
-        iperf_result = self._ssh_session.run('cat {}'.format(
-            self._get_remote_log_path()))
+        iperf_result = self._ssh_session.run(
+            "cat {}".format(self._get_remote_log_path())
+        )
 
         log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, 'w') as f:
+        with open(log_file, "w") as f:
             f.write(iperf_result.stdout)
 
-        self._ssh_session.run_async('rm {}'.format(
-            self._get_remote_log_path()))
+        self._ssh_session.run_async("rm {}".format(self._get_remote_log_path()))
         self._iperf_pid = None
         return log_file
 
@@ -613,8 +619,7 @@
         }
 
 
-event_bus.register_subscription(
-    _AndroidDeviceBridge.on_test_begin.subscription)
+event_bus.register_subscription(_AndroidDeviceBridge.on_test_begin.subscription)
 event_bus.register_subscription(_AndroidDeviceBridge.on_test_end.subscription)
 
 
@@ -635,7 +640,7 @@
         self._android_device_or_serial = android_device_or_serial
 
         self._iperf_process = None
-        self._current_tag = ''
+        self._current_tag = ""
 
     @property
     def port(self):
@@ -651,12 +656,13 @@
             return self._android_device_or_serial
         else:
             return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial]
+                self._android_device_or_serial
+            ]
 
     def _get_device_log_path(self):
-        return '~/data/iperf_server_port%s.log' % self.port
+        return "~/data/iperf_server_port%s.log" % self.port
 
-    def start(self, extra_args='', tag='', iperf_binary=None):
+    def start(self, extra_args="", tag="", iperf_binary=None):
         """Starts iperf server on an ADB device.
 
         Args:
@@ -671,23 +677,27 @@
             return
 
         if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
+            logging.debug(
+                "No iperf3 binary specified. Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
         else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_command = '{} -s -J -p {}'.format(iperf_binary, self.port)
+            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+        iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port)
 
         self._iperf_process = self._android_device.adb.shell_nb(
-            '{cmd} {extra_flags} > {log_file} 2>&1'.format(
+            "{cmd} {extra_flags} > {log_file} 2>&1".format(
                 cmd=iperf_command,
                 extra_flags=extra_args,
-                log_file=self._get_device_log_path()))
+                log_file=self._get_device_log_path(),
+            )
+        )
 
-        self._iperf_process_adb_pid = ''
+        self._iperf_process_adb_pid = ""
         while len(self._iperf_process_adb_pid) == 0:
             self._iperf_process_adb_pid = self._android_device.adb.shell(
-                'pgrep iperf3 -n')
+                "pgrep iperf3 -n"
+            )
 
         self._current_tag = tag
 
@@ -700,26 +710,27 @@
         if self._iperf_process is None:
             return
 
-        job.run('kill -9 {}'.format(self._iperf_process.pid))
+        job.run("kill -9 {}".format(self._iperf_process.pid))
 
         # TODO(markdr): update with definitive kill method
         while True:
-            iperf_process_list = self._android_device.adb.shell('pgrep iperf3')
+            iperf_process_list = self._android_device.adb.shell("pgrep iperf3")
             if iperf_process_list.find(self._iperf_process_adb_pid) == -1:
                 break
             else:
-                self._android_device.adb.shell("kill -9 {}".format(
-                    self._iperf_process_adb_pid))
+                self._android_device.adb.shell(
+                    "kill -9 {}".format(self._iperf_process_adb_pid)
+                )
 
-        iperf_result = self._android_device.adb.shell('cat {}'.format(
-            self._get_device_log_path()))
+        iperf_result = self._android_device.adb.shell(
+            "cat {}".format(self._get_device_log_path())
+        )
 
         log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, 'w') as f:
+        with open(log_file, "w") as f:
             f.write(iperf_result)
 
-        self._android_device.adb.shell('rm {}'.format(
-            self._get_device_log_path()))
+        self._android_device.adb.shell("rm {}".format(self._get_device_log_path()))
 
         self._iperf_process = None
         return log_file
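For reference, a minimal sketch of the remote command that the reformatted IPerfServerOverSsh.start() assembles from the format strings above; the port and extra flag below are hypothetical values, not taken from this change:

iperf_binary = "iperf3"  # default when no binary is configured
port = 5201  # hypothetical server port
extra_args = "--forceflush"  # hypothetical extra flag
log_file = "/tmp/iperf_server_port%s.log" % port
iperf_command = "{} -s -J -p {}".format(iperf_binary, port)
cmd = "{cmd} {extra_flags} > {log_file}".format(
    cmd=iperf_command, extra_flags=extra_args, log_file=log_file
)
assert cmd == "iperf3 -s -J -p 5201 --forceflush > /tmp/iperf_server_port5201.log"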
diff --git a/src/antlion/controllers/monsoon.py b/src/antlion/controllers/monsoon.py
deleted file mode 100644
index a14f4fd..0000000
--- a/src/antlion/controllers/monsoon.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon as HvpmMonsoon
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import \
-    Monsoon as LvpmStockMonsoon
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Monsoon'
-ACTS_CONTROLLER_REFERENCE_NAME = 'monsoons'
-
-
-def create(configs):
-    """Takes a list of Monsoon configs and returns Monsoon Controllers.
-
-    Args:
-        configs: A list of serial numbers, or dicts in the form:
-            {
-                'type': anyof('LvpmStockMonsoon', 'HvpmMonsoon')
-                'serial': int
-            }
-
-    Returns:
-        a list of Monsoon configs
-
-    Raises:
-        ValueError if the configuration does not provide the required info.
-    """
-    objs = []
-    for config in configs:
-        monsoon_type = None
-        if isinstance(config, dict):
-            if isinstance(config.get('type', None), str):
-                if 'lvpm' in config['type'].lower():
-                    monsoon_type = LvpmStockMonsoon
-                elif 'hvpm' in config['type'].lower():
-                    monsoon_type = HvpmMonsoon
-                else:
-                    raise ValueError('Unknown monsoon type %s in Monsoon '
-                                     'config %s' % (config['type'], config))
-            if 'serial' not in config:
-                raise ValueError('Monsoon config must specify "serial".')
-            serial_number = int(config.get('serial'))
-        else:
-            serial_number = int(config)
-        if monsoon_type is None:
-            if serial_number < 20000:
-                # This code assumes the LVPM has firmware version 20. If
-                # someone has updated the firmware, or somehow found an older
-                # version, the power measurement will fail.
-                monsoon_type = LvpmStockMonsoon
-            else:
-                monsoon_type = HvpmMonsoon
-
-        objs.append(monsoon_type(serial=serial_number))
-    return objs
-
-
-def destroy(monsoons):
-    for monsoon in monsoons:
-        if monsoon.is_allocated():
-            monsoon.release_monsoon_connection()
-
-
-def get_info(monsoons):
-    return [{'serial': monsoon.serial} for monsoon in monsoons]
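For context on the removal: the deleted create() accepted either bare serial numbers or dicts carrying 'type' and 'serial'. A hypothetical testbed entry (serial values made up) would have looked like:

monsoon_configs = [
    12345,  # bare serial; values below 20000 were assumed to be LVPM
    {"type": "HvpmMonsoon", "serial": 23456},  # explicit type selection
    {"type": "LvpmStockMonsoon", "serial": 11111},
]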
diff --git a/src/antlion/controllers/monsoon_lib/__init__.py b/src/antlion/controllers/monsoon_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/__init__.py b/src/antlion/controllers/monsoon_lib/api/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/common.py b/src/antlion/controllers/monsoon_lib/api/common.py
deleted file mode 100644
index 70e0a0a..0000000
--- a/src/antlion/controllers/monsoon_lib/api/common.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.signals import ControllerError
-
-
-class MonsoonError(ControllerError):
-    """Raised for exceptions encountered when interfacing with a Monsoon device.
-    """
-
-
-class PassthroughStates(object):
-    """An enum containing the values for power monitor's passthrough states."""
-    # "Off" or 0 means USB always off.
-    OFF = 0
-    # "On" or 1 means USB always on.
-    ON = 1
-    # "Auto" or 2 means USB is automatically turned off during sampling, and
-    # turned back on after sampling.
-    AUTO = 2
-
-
-PASSTHROUGH_STATES = {
-    'off': PassthroughStates.OFF,
-    'on': PassthroughStates.ON,
-    'auto': PassthroughStates.AUTO
-}
-
-
-class MonsoonDataRecord(object):
-    """A data class for Monsoon data points."""
-    def __init__(self, sample_time, relative_time, current):
-        """Creates a new MonsoonDataRecord.
-
-        Args:
-            sample_time: the unix timestamp of the sample.
-            relative_time: the time since the start of the measurement.
-            current: The current in Amperes as a string.
-        """
-        self._sample_time = sample_time
-        self._relative_time = relative_time
-        self._current = current
-
-    @property
-    def time(self):
-        """The time the record was fetched."""
-        return self._sample_time
-
-    @property
-    def relative_time(self):
-        """The time the record was fetched, relative to collection start."""
-        return self._relative_time
-
-    @property
-    def current(self):
-        """The amount of current in Amperes measured for the given record."""
-        return self._current
-
-
-class MonsoonResult(object):
-    """An object that contains aggregated data collected during sampling.
-
-    Attributes:
-        _num_samples: The number of samples gathered.
-        _sum_currents: The total sum of all current values gathered, in amperes.
-        _hz: The frequency sampling is being done at.
-        _voltage: The voltage output during sampling.
-    """
-
-    # The number of decimal places to round a value to.
-    ROUND_TO = 6
-
-    def __init__(self, num_samples, sum_currents, hz, voltage, datafile_path):
-        """Creates a new MonsoonResult.
-
-        Args:
-            num_samples: the number of samples collected.
-            sum_currents: the total summation of every current measurement.
-            hz: the number of samples per second.
-            voltage: the voltage used during the test.
-            datafile_path: the path to the monsoon data file.
-        """
-        self._num_samples = num_samples
-        self._sum_currents = sum_currents
-        self._hz = hz
-        self._voltage = voltage
-        self.tag = datafile_path
-
-    def get_data_points(self):
-        """Returns an iterator of MonsoonDataRecords."""
-        class MonsoonDataIterator:
-            def __init__(self, file):
-                self.file = file
-
-            def __iter__(self):
-                with open(self.file, 'r') as f:
-                    start_time = None
-                    for line in f:
-                        # Remove the newline character.
-                        line.strip()
-                        sample_time, current = map(float, line.split(' '))
-                        if start_time is None:
-                            start_time = sample_time
-                        yield MonsoonDataRecord(sample_time,
-                                                sample_time - start_time,
-                                                current)
-
-        return MonsoonDataIterator(self.tag)
-
-    @property
-    def num_samples(self):
-        """The number of samples recorded during the test."""
-        return self._num_samples
-
-    @property
-    def average_current(self):
-        """Average current in mA."""
-        if self.num_samples == 0:
-            return 0
-        return round(self._sum_currents * 1000 / self.num_samples,
-                     self.ROUND_TO)
-
-    @property
-    def total_charge(self):
-        """Total charged used in the unit of mAh."""
-        return round((self._sum_currents / self._hz) * 1000 / 3600,
-                     self.ROUND_TO)
-
-    @property
-    def total_power(self):
-        """Total power used."""
-        return round(self.average_current * self._voltage, self.ROUND_TO)
-
-    @property
-    def voltage(self):
-        """The voltage during the measurement (in Volts)."""
-        return self._voltage
-
-    def __str__(self):
-        return ('avg current: %s\n'
-                'total charge: %s\n'
-                'total power: %s\n'
-                'total samples: %s' % (self.average_current, self.total_charge,
-                                      self.total_power, self._num_samples))
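The deleted MonsoonResult derives its summary metrics from the sample count, current sum, sample rate, and voltage. A worked example with made-up numbers, mirroring the formulas above:

# A 10 s capture at 5 kHz whose instantaneous currents sum to 10,000 A.
num_samples, sum_currents, hz, voltage = 50_000, 10_000.0, 5000, 4.2
average_current = round(sum_currents * 1000 / num_samples, 6)  # 200.0 mA
total_charge = round((sum_currents / hz) * 1000 / 3600, 6)  # 0.555556 mAh
total_power = round(average_current * voltage, 6)  # 840.0 (mA * V)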
diff --git a/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py b/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py b/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py
deleted file mode 100644
index 44afba2..0000000
--- a/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import multiprocessing
-import time
-
-from Monsoon import HVPM
-from Monsoon import Operations as op
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonResult
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.hvpm.transformers import HvpmTransformer
-
-
-class Monsoon(BaseMonsoon):
-    """The controller class for interacting with the HVPM Monsoon."""
-
-    # The device doesn't officially support voltages lower than this. Note that
-    # 0 is a valid voltage.
-    MIN_VOLTAGE = 0.8
-
-    # The Monsoon doesn't support setting higher voltages than this directly
-    # without tripping overvoltage.
-    # Note that it is possible to increase the voltage above this value by
-    # increasing the voltage by small increments over a period of time.
-    # The communication protocol supports up to 16V.
-    MAX_VOLTAGE = 13.5
-
-    def __init__(self, serial):
-        super().__init__()
-        self.serial = serial
-        self._mon = HVPM.Monsoon()
-        self._mon.setup_usb(serial)
-        self._allocated = True
-        if self._mon.Protocol.DEVICE is None:
-            raise ValueError('HVPM Monsoon %s could not be found.' % serial)
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        self._log.debug('Setting voltage to %sV.' % voltage)
-        self._mon.setVout(voltage)
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        self._mon.setRunTimeCurrentLimit(amperes)
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        self._mon.setPowerUpCurrentLimit(amperes)
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        self._mon.fillStatusPacket()
-        return self._mon.statusPacket
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Sends the call to set usb passthrough mode.
-
-        Args:
-            mode: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-        """
-        self._mon.setUSBPassthroughMode(mode)
-
-    def _get_main_voltage(self):
-        """Returns the value of the voltage on the main channel."""
-        # Any getValue call on a setX function will return the value set for X.
-        # Using this, we can pull the last setMainVoltage (or its default).
-        return (self._mon.Protocol.getValue(op.OpCodes.setMainVoltage, 4) /
-                op.Conversion.FLOAT_TO_INT)
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """See parent docstring for details."""
-        voltage = self._get_main_voltage()
-
-        aggregator = SampleAggregator(measure_after_seconds)
-        manager = multiprocessing.Manager()
-
-        assembly_line_builder = AssemblyLineBuilder(manager.Queue,
-                                                    ThreadAssemblyLine)
-        assembly_line_builder.source(
-            HvpmTransformer(self.serial, duration + measure_after_seconds))
-        if hz != 5000:
-            assembly_line_builder.into(DownSampler(int(5000 / hz)))
-        if output_path:
-            assembly_line_builder.into(Tee(output_path, measure_after_seconds))
-        assembly_line_builder.into(aggregator)
-        if transformers:
-            for transformer in transformers:
-                assembly_line_builder.into(transformer)
-
-        self.take_samples(assembly_line_builder.build())
-
-        manager.shutdown()
-
-        self._mon.setup_usb(self.serial)
-        self._allocated = True
-        monsoon_data = MonsoonResult(aggregator.num_samples,
-                                     aggregator.sum_currents, hz, voltage,
-                                     output_path)
-        self._log.info('Measurement summary:\n%s', str(monsoon_data))
-        return monsoon_data
-
-    def reconnect_monsoon(self):
-        """Reconnect Monsoon to serial port."""
-        self.release_monsoon_connection()
-        self._log.info('Closed monsoon connection.')
-        time.sleep(5)
-        self.establish_monsoon_connection()
-
-    def release_monsoon_connection(self):
-        self._mon.closeDevice()
-        self._allocated = False
-
-    def is_allocated(self):
-        return self._allocated
-
-    def establish_monsoon_connection(self):
-        self._mon.setup_usb(self.serial)
-        # Makes sure the Monsoon is in the command-receiving state.
-        self._mon.stopSampling()
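The deleted measure_power() chains a sampling source through optional DownSampler and Tee stages into a SampleAggregator. A rough, self-contained model of that flow using plain generators; this is illustrative only and not the monsoon_lib API:

def downsample(samples, factor):
    # Average every `factor` consecutive (time, amps) samples into one.
    bucket = []
    for sample in samples:
        bucket.append(sample)
        if len(bucket) == factor:
            yield (bucket[-1][0], sum(amps for _, amps in bucket) / factor)
            bucket = []

def aggregate(samples):
    # Rough stand-in for SampleAggregator: count samples and sum currents.
    count, total = 0, 0.0
    for _, amps in samples:
        count += 1
        total += amps
    return count, total

raw = [(i / 5000, 0.2) for i in range(5000)]  # 1 s of fake 5 kHz samples at 200 mA
num_samples, sum_currents = aggregate(downsample(raw, 10))
assert num_samples == 500 and abs(sum_currents - 100.0) < 1e-9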
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py
deleted file mode 100644
index 063972f..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import multiprocessing
-import time
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonResult
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon_proxy import MonsoonProxy
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.stock_transformers import StockLvpmSampler
-
-
-class Monsoon(BaseMonsoon):
-    """The controller class for interacting with the LVPM Monsoon."""
-
-    # The device protocol has a floor value for positive voltages. Note that 0
-    # is still a valid voltage.
-    MIN_VOLTAGE = 2.01
-
-    # The device protocol does not support values above this.
-    MAX_VOLTAGE = 4.55
-
-    def __init__(self, serial, device=None):
-        super().__init__()
-        self._mon = MonsoonProxy(serialno=serial, device=device)
-        self._allocated = True
-        self.serial = serial
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: Voltage to set the output to.
-        """
-        self._log.debug('Setting voltage to %sV.' % voltage)
-        self._mon.set_voltage(voltage)
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        self._mon.set_max_current(amperes)
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        self._mon.set_max_initial_current(amperes)
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        return self._mon.get_status()
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Sends the call to set usb passthrough mode.
-
-        Args:
-            mode: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-        """
-        self._mon.set_usb_passthrough(mode)
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """See parent docstring for details."""
-        voltage = self._mon.get_voltage()
-
-        aggregator = SampleAggregator(measure_after_seconds)
-        manager = multiprocessing.Manager()
-
-        assembly_line_builder = AssemblyLineBuilder(manager.Queue,
-                                                    ThreadAssemblyLine)
-        assembly_line_builder.source(
-            StockLvpmSampler(self.serial, duration + measure_after_seconds))
-        if hz != 5000:
-            assembly_line_builder.into(DownSampler(int(round(5000 / hz))))
-        if output_path is not None:
-            assembly_line_builder.into(Tee(output_path, measure_after_seconds))
-        assembly_line_builder.into(aggregator)
-        if transformers:
-            for transformer in transformers:
-                assembly_line_builder.into(transformer)
-
-        self.take_samples(assembly_line_builder.build())
-
-        manager.shutdown()
-
-        monsoon_data = MonsoonResult(aggregator.num_samples,
-                                     aggregator.sum_currents, hz, voltage,
-                                     output_path)
-        self._log.info('Measurement summary:\n%s', str(monsoon_data))
-        return monsoon_data
-
-    def reconnect_monsoon(self):
-        """Reconnect Monsoon to serial port."""
-        self._log.debug('Close serial connection')
-        self._mon.ser.close()
-        self._log.debug('Reset serial port')
-        time.sleep(5)
-        self._log.debug('Open serial connection')
-        self._mon.ser.open()
-        self._mon.ser.reset_input_buffer()
-        self._mon.ser.reset_output_buffer()
-
-    def release_monsoon_connection(self):
-        self._mon.release_dev_port()
-        self._allocated = False
-
-    def is_allocated(self):
-        return self._allocated
-
-    def establish_monsoon_connection(self):
-        self._mon.obtain_dev_port()
-        self._allocated = True
-        # Makes sure the Monsoon is in the command-receiving state.
-        self._mon.stop_data_collection()
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py
deleted file mode 100644
index 909bff4..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py
+++ /dev/null
@@ -1,402 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The interface for a USB-connected Monsoon power meter.
-
-Details on the protocol can be found at
-(http://msoon.com/LabEquipment/PowerMonitor/)
-
-Based on the original py2 script of kens@google.com.
-"""
-import collections
-import fcntl
-import logging
-import os
-import select
-import struct
-import sys
-import time
-
-import errno
-import serial
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-
-
-class LvpmStatusPacket(object):
-    """The data received from asking an LVPM Monsoon for its status.
-
-    Attributes names with the same values as HVPM match those defined in
-    Monsoon.Operations.statusPacket.
-    """
-
-    def __init__(self, values):
-        iter_value = iter(values)
-        self.packetType = next(iter_value)
-        self.firmwareVersion = next(iter_value)
-        self.protocolVersion = next(iter_value)
-        self.mainFineCurrent = next(iter_value)
-        self.usbFineCurrent = next(iter_value)
-        self.auxFineCurrent = next(iter_value)
-        self.voltage1 = next(iter_value)
-        self.mainCoarseCurrent = next(iter_value)
-        self.usbCoarseCurrent = next(iter_value)
-        self.auxCoarseCurrent = next(iter_value)
-        self.voltage2 = next(iter_value)
-        self.outputVoltageSetting = next(iter_value)
-        self.temperature = next(iter_value)
-        self.status = next(iter_value)
-        self.leds = next(iter_value)
-        self.mainFineResistor = next(iter_value)
-        self.serialNumber = next(iter_value)
-        self.sampleRate = next(iter_value)
-        self.dacCalLow = next(iter_value)
-        self.dacCalHigh = next(iter_value)
-        self.powerupCurrentLimit = next(iter_value)
-        self.runtimeCurrentLimit = next(iter_value)
-        self.powerupTime = next(iter_value)
-        self.usbFineResistor = next(iter_value)
-        self.auxFineResistor = next(iter_value)
-        self.initialUsbVoltage = next(iter_value)
-        self.initialAuxVoltage = next(iter_value)
-        self.hardwareRevision = next(iter_value)
-        self.temperatureLimit = next(iter_value)
-        self.usbPassthroughMode = next(iter_value)
-        self.mainCoarseResistor = next(iter_value)
-        self.usbCoarseResistor = next(iter_value)
-        self.auxCoarseResistor = next(iter_value)
-        self.defMainFineResistor = next(iter_value)
-        self.defUsbFineResistor = next(iter_value)
-        self.defAuxFineResistor = next(iter_value)
-        self.defMainCoarseResistor = next(iter_value)
-        self.defUsbCoarseResistor = next(iter_value)
-        self.defAuxCoarseResistor = next(iter_value)
-        self.eventCode = next(iter_value)
-        self.eventData = next(iter_value)
-
-
-class MonsoonProxy(object):
-    """Class that directly talks to monsoon over serial.
-
-    Provides a simple class to use the power meter.
-    See http://wiki/Main/MonsoonProtocol for information on the protocol.
-    """
-
-    # The format of the status packet.
-    STATUS_FORMAT = '>BBBhhhHhhhHBBBxBbHBHHHHBbbHHBBBbbbbbbbbbBH'
-
-    # The list of fields that appear in the Monsoon status packet.
-    STATUS_FIELDS = [
-        'packetType',
-        'firmwareVersion',
-        'protocolVersion',
-        'mainFineCurrent',
-        'usbFineCurrent',
-        'auxFineCurrent',
-        'voltage1',
-        'mainCoarseCurrent',
-        'usbCoarseCurrent',
-        'auxCoarseCurrent',
-        'voltage2',
-        'outputVoltageSetting',
-        'temperature',
-        'status',
-        'leds',
-        'mainFineResistorOffset',
-        'serialNumber',
-        'sampleRate',
-        'dacCalLow',
-        'dacCalHigh',
-        'powerupCurrentLimit',
-        'runtimeCurrentLimit',
-        'powerupTime',
-        'usbFineResistorOffset',
-        'auxFineResistorOffset',
-        'initialUsbVoltage',
-        'initialAuxVoltage',
-        'hardwareRevision',
-        'temperatureLimit',
-        'usbPassthroughMode',
-        'mainCoarseResistorOffset',
-        'usbCoarseResistorOffset',
-        'auxCoarseResistorOffset',
-        'defMainFineResistor',
-        'defUsbFineResistor',
-        'defAuxFineResistor',
-        'defMainCoarseResistor',
-        'defUsbCoarseResistor',
-        'defAuxCoarseResistor',
-        'eventCode',
-        'eventData',
-    ]
-
-    def __init__(self, device=None, serialno=None, connection_timeout=600):
-        """Establish a connection to a Monsoon.
-
-        By default, opens the first available port, waiting if none are ready.
-
-        Args:
-            device: The particular device port to be used.
-            serialno: The Monsoon's serial number.
-            connection_timeout: The number of seconds to wait for the device to
-                connect.
-
-        Raises:
-            TimeoutError if unable to connect to the device.
-        """
-        self.start_voltage = 0
-        self.serial = serialno
-
-        if device:
-            self.ser = serial.Serial(device, timeout=1)
-            return
-        # Try all devices connected through USB virtual serial ports until we
-        # find one we can use.
-        self._tempfile = None
-        self.obtain_dev_port(connection_timeout)
-        self.log = logging.getLogger()
-
-    def obtain_dev_port(self, timeout=600):
-        """Obtains the device port for this Monsoon.
-
-        Args:
-            timeout: The time in seconds to wait for the device to connect.
-
-        Raises:
-            TimeoutError if the device was unable to be found, or was not
-            available.
-        """
-        start_time = time.time()
-
-        while start_time + timeout > time.time():
-            for dev in os.listdir('/dev'):
-                prefix = 'ttyACM'
-                # Prefix is different on Mac OS X.
-                if sys.platform == 'darwin':
-                    prefix = 'tty.usbmodem'
-                if not dev.startswith(prefix):
-                    continue
-                tmpname = '/tmp/monsoon.%s.%s' % (os.uname()[0], dev)
-                self._tempfile = open(tmpname, 'w')
-                if not os.access(tmpname, os.R_OK | os.W_OK):
-                    try:
-                        os.chmod(tmpname, 0o666)
-                    except OSError as e:
-                        if e.errno == errno.EACCES:
-                            raise ValueError(
-                                'Unable to set permissions to read/write to '
-                                '%s. This file is owned by another user; '
-                                'please grant o+wr access to this file, or '
-                                'run as that user.')
-                        raise
-
-                try:  # Use a lock file to ensure exclusive access.
-                    fcntl.flock(self._tempfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
-                except IOError:
-                    logging.error('Device %s is in use.', repr(dev))
-                    continue
-
-                try:  # try to open the device
-                    self.ser = serial.Serial('/dev/%s' % dev, timeout=1)
-                    self.stop_data_collection()  # just in case
-                    self._flush_input()  # discard stale input
-                    status = self.get_status()
-                except Exception as e:
-                    logging.warning('Error opening device %s: %s', dev, e,
-                                    exc_info=True)
-                    continue
-
-                if not status:
-                    logging.error('No response from device %s.', dev)
-                elif self.serial and status.serialNumber != self.serial:
-                    logging.error('Another device serial #%d seen on %s',
-                                  status.serialNumber, dev)
-                else:
-                    self.start_voltage = status.voltage1
-                    return
-
-            self._tempfile = None
-            logging.info('Waiting for device...')
-            time.sleep(1)
-        raise TimeoutError(
-            'Unable to connect to Monsoon device with '
-            'serial "%s" within %s seconds.' % (self.serial, timeout))
-
-    def release_dev_port(self):
-        """Releases the dev port used to communicate with the Monsoon device."""
-        fcntl.flock(self._tempfile, fcntl.LOCK_UN)
-        self._tempfile.close()
-        self.ser.close()
-
-    def get_status(self):
-        """Requests and waits for status.
-
-        Returns:
-            status dictionary.
-        """
-        self._send_struct('BBB', 0x01, 0x00, 0x00)
-        read_bytes = self._read_packet()
-
-        if not read_bytes:
-            raise MonsoonError('Failed to read Monsoon status')
-        expected_size = struct.calcsize(self.STATUS_FORMAT)
-        if len(read_bytes) != expected_size or read_bytes[0] != 0x10:
-            raise MonsoonError('Wanted status, dropped type=0x%02x, len=%d',
-                               read_bytes[0], len(read_bytes))
-
-        status = collections.OrderedDict(
-            zip(self.STATUS_FIELDS,
-                struct.unpack(self.STATUS_FORMAT, read_bytes)))
-        p_type = status['packetType']
-        if p_type != 0x10:
-            raise MonsoonError('Packet type %s is not 0x10.' % p_type)
-
-        for k in status.keys():
-            if k.endswith('VoltageSetting'):
-                status[k] = 2.0 + status[k] * 0.01
-            elif k.endswith('FineCurrent'):
-                pass  # needs calibration data
-            elif k.endswith('CoarseCurrent'):
-                pass  # needs calibration data
-            elif k.startswith('voltage') or k.endswith('Voltage'):
-                status[k] = status[k] * 0.000125
-            elif k.endswith('Resistor'):
-                status[k] = 0.05 + status[k] * 0.0001
-                if k.startswith('aux') or k.startswith('defAux'):
-                    status[k] += 0.05
-            elif k.endswith('CurrentLimit'):
-                status[k] = 8 * (1023 - status[k]) / 1023.0
-        return LvpmStatusPacket(status.values())
-
-    def set_voltage(self, voltage):
-        """Sets the voltage on the device to the specified value.
-
-        Args:
-            voltage: Either 0 or a value between 2.01 and 4.55 inclusive.
-
-        Raises:
-            struct.error if voltage is an invalid value.
-        """
-        # The device has a range of 255 voltage values:
-        #
-        #     0   is "off". Note this value not set outputVoltageSetting to
-        #             zero. The previous outputVoltageSetting value is
-        #             maintained.
-        #     1   is 2.01V.
-        #     255 is 4.55V.
-        voltage_byte = max(0, round((voltage - 2.0) * 100))
-        self._send_struct('BBB', 0x01, 0x01, voltage_byte)
-
-    def get_voltage(self):
-        """Get the output voltage.
-
-        Returns:
-            Current Output Voltage (in unit of V).
-        """
-        return self.get_status().outputVoltageSetting
-
-    def set_max_current(self, i):
-        """Set the max output current."""
-        if i < 0 or i > 8:
-            raise MonsoonError(('Target max current %sA, is out of acceptable '
-                                'range [0, 8].') % i)
-        val = 1023 - int((i / 8) * 1023)
-        self._send_struct('BBB', 0x01, 0x0a, val & 0xff)
-        self._send_struct('BBB', 0x01, 0x0b, val >> 8)
-
-    def set_max_initial_current(self, current):
-        """Sets the maximum initial current, in mA."""
-        if current < 0 or current > 8:
-            raise MonsoonError(('Target max current %sA, is out of acceptable '
-                                'range [0, 8].') % current)
-        val = 1023 - int((current / 8) * 1023)
-        self._send_struct('BBB', 0x01, 0x08, val & 0xff)
-        self._send_struct('BBB', 0x01, 0x09, val >> 8)
-
-    def set_usb_passthrough(self, passthrough_mode):
-        """Set the USB passthrough mode.
-
-        Args:
-            passthrough_mode: The mode used for passthrough. Must be the integer
-                value. See common.PassthroughModes for a list of values and
-                their meanings.
-        """
-        self._send_struct('BBB', 0x01, 0x10, passthrough_mode)
-
-    def get_usb_passthrough(self):
-        """Get the USB passthrough mode: 0 = off, 1 = on,  2 = auto.
-
-        Returns:
-            The mode used for passthrough, as an integer. See
-                common.PassthroughModes for a list of values and their meanings.
-        """
-        return self.get_status().usbPassthroughMode
-
-    def start_data_collection(self):
-        """Tell the device to start collecting and sending measurement data."""
-        self._send_struct('BBB', 0x01, 0x1b, 0x01)  # Mystery command
-        self._send_struct('BBBBBBB', 0x02, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8)
-
-    def stop_data_collection(self):
-        """Tell the device to stop collecting measurement data."""
-        self._send_struct('BB', 0x03, 0x00)  # stop
-
-    def _send_struct(self, fmt, *args):
-        """Pack a struct (without length or checksum) and send it."""
-        # Flush out the input buffer before sending data
-        self._flush_input()
-        data = struct.pack(fmt, *args)
-        data_len = len(data) + 1
-        checksum = (data_len + sum(bytearray(data))) % 256
-        out = struct.pack('B', data_len) + data + struct.pack('B', checksum)
-        self.ser.write(out)
-
-    def _read_packet(self):
-        """Returns a single packet as a string (without length or checksum)."""
-        len_char = self.ser.read(1)
-        if not len_char:
-            raise MonsoonError('Reading from serial port timed out')
-
-        data_len = ord(len_char)
-        if not data_len:
-            return ''
-        result = self.ser.read(int(data_len))
-        result = bytearray(result)
-        if len(result) != data_len:
-            raise MonsoonError(
-                'Length mismatch, expected %d bytes, got %d bytes.', data_len,
-                len(result))
-        body = result[:-1]
-        checksum = (sum(struct.unpack('B' * len(body), body)) + data_len) % 256
-        if result[-1] != checksum:
-            raise MonsoonError(
-                'Invalid checksum from serial port! Expected %s, got %s',
-                hex(checksum), hex(result[-1]))
-        return result[:-1]
-
-    def _flush_input(self):
-        """Flushes all read data until the input is empty."""
-        self.ser.reset_input_buffer()
-        while True:
-            ready_r, ready_w, ready_x = select.select([self.ser], [],
-                                                      [self.ser], 0)
-            if len(ready_x) > 0:
-                raise MonsoonError('Exception from serial port.')
-            elif len(ready_r) > 0:
-                self.ser.read(1)  # This may cause underlying buffering.
-                # Flush the underlying buffer too.
-                self.ser.reset_input_buffer()
-            else:
-                break
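The deleted MonsoonProxy frames every command as a length byte, the payload, and a modulo-256 checksum. A worked example of that framing for a hypothetical "set voltage to 3.6 V" command, where the voltage byte is round((3.6 - 2.0) * 100) == 160, following the formulas from _send_struct():

import struct

data = struct.pack("BBB", 0x01, 0x01, 160)  # opcode bytes plus voltage byte
data_len = len(data) + 1  # payload plus checksum byte
checksum = (data_len + sum(bytearray(data))) % 256  # (4 + 1 + 1 + 160) % 256 == 166
frame = struct.pack("B", data_len) + data + struct.pack("B", checksum)
assert frame == b"\x04\x01\x01\xa0\xa6"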
diff --git a/src/antlion/controllers/monsoon_lib/api/monsoon.py b/src/antlion/controllers/monsoon_lib/api/monsoon.py
deleted file mode 100644
index 68ab81c..0000000
--- a/src/antlion/controllers/monsoon_lib/api/monsoon.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import time
-
-from antlion.controllers.monsoon_lib.api import common
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-from antlion.controllers.monsoon_lib.api.common import PassthroughStates
-
-
-class BaseMonsoon(object):
-    """The base class for all Monsoon interface devices.
-
-    Attributes:
-        on_reconnect: The function to call when Monsoon has reconnected USB.
-            Raises TimeoutError if the device cannot be found.
-        on_disconnect: The function to call when Monsoon has disconnected USB.
-    """
-
-    # The minimum non-zero supported voltage for the given Monsoon device.
-    MIN_VOLTAGE = NotImplemented
-
-    # The maximum practical voltage for the given Monsoon device.
-    MAX_VOLTAGE = NotImplemented
-
-    # When ramping voltage, the rate in volts/second to increase the voltage.
-    VOLTAGE_RAMP_RATE = 3
-
-    # The time step between voltage increments. This value does not need to be
-    # modified.
-    VOLTAGE_RAMP_TIME_STEP = .1
-
-    def __init__(self):
-        self._log = logging.getLogger()
-        self.on_disconnect = lambda: None
-        self.on_reconnect = lambda: None
-
-    @classmethod
-    def get_closest_valid_voltage(cls, voltage):
-        """Returns the nearest valid voltage value."""
-        if voltage < cls.MIN_VOLTAGE / 2:
-            return 0
-        else:
-            return max(cls.MIN_VOLTAGE, min(voltage, cls.MAX_VOLTAGE))
-
-    @classmethod
-    def is_voltage_valid(cls, voltage):
-        """Returns True iff the given voltage can be set on the device.
-
-        Valid voltage values are {x | x ∈ {0} ∪ [MIN_VOLTAGE, MAX_VOLTAGE]}.
-        """
-        return cls.get_closest_valid_voltage(voltage) == voltage
-
-    @classmethod
-    def validate_voltage(cls, voltage):
-        """Raises a MonsoonError if the given voltage cannot be set."""
-        if not cls.is_voltage_valid(voltage):
-            raise MonsoonError('Invalid voltage %s. Voltage must be zero or '
-                               'within range [%s, %s].' %
-                               (voltage, cls.MIN_VOLTAGE, cls.MAX_VOLTAGE))
-
-    def set_voltage_safe(self, voltage):
-        """Sets the output voltage of monsoon to a safe value.
-
-        This function is effectively:
-            self.set_voltage(self.get_closest_valid_voltage(voltage)).
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        normalized_voltage = self.get_closest_valid_voltage(voltage)
-        if voltage != normalized_voltage:
-            self._log.debug(
-                'Requested voltage %sV is invalid.' % voltage)
-        self.set_voltage(normalized_voltage)
-
-    def ramp_voltage(self, start, end):
-        """Ramps up the voltage to the specified end voltage.
-
-        Increments the voltage by fixed intervals of .1 Volts every .1 seconds.
-
-        Args:
-            start: The starting voltage
-            end: the end voltage. Must be higher than the starting voltage.
-        """
-        voltage = start
-
-        while voltage < end:
-            self.set_voltage(self.get_closest_valid_voltage(voltage))
-            voltage += self.VOLTAGE_RAMP_RATE * self.VOLTAGE_RAMP_TIME_STEP
-            time.sleep(self.VOLTAGE_RAMP_TIME_STEP)
-        self.set_voltage(end)
-
-    def usb(self, state):
-        """Sets the monsoon's USB passthrough mode.
-
-        This is specific to the USB port in front of the monsoon box which
-        connects to the powered device, NOT the USB that is used to talk to the
-        monsoon itself.
-
-        Args:
-            state: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-
-        Raises:
-            ValueError if the state given is invalid.
-            TimeoutError if unable to set the passthrough mode within a minute,
-                or if the device was not found after setting the state to ON.
-        """
-        expected_state = None
-        states_dict = common.PASSTHROUGH_STATES
-        if isinstance(state, str):
-            normalized_state = state.lower()
-            expected_state = states_dict.get(normalized_state, None)
-        elif state in states_dict.values():
-            expected_state = state
-
-        if expected_state is None:
-            raise ValueError(
-                'USB passthrough state %s is not a valid state. '
-                'Expected any of %s.' % (repr(state), states_dict))
-        if self.status.usbPassthroughMode == expected_state:
-            return
-
-        if expected_state in [PassthroughStates.OFF, PassthroughStates.AUTO]:
-            self.on_disconnect()
-
-        start_time = time.time()
-        time_limit_seconds = 60
-        while self.status.usbPassthroughMode != expected_state:
-            current_time = time.time()
-            if current_time >= start_time + time_limit_seconds:
-                raise TimeoutError('Setting USB mode timed out after %s '
-                                   'seconds.' % time_limit_seconds)
-            self._set_usb_passthrough_mode(expected_state)
-            time.sleep(1)
-        self._log.info('Monsoon usbPassthroughMode is now "%s"',
-                       state)
-
-        if expected_state in [PassthroughStates.ON]:
-            self._on_reconnect()
-
-    def attach_device(self, android_device):
-        """Deprecated. Use the connection callbacks instead."""
-
-        def on_reconnect():
-            # Make sure the device is connected and available for commands.
-            android_device.wait_for_boot_completion()
-            android_device.start_services()
-            # Release wake lock to put device into sleep.
-            android_device.droid.goToSleepNow()
-            self._log.info('Dut reconnected.')
-
-        def on_disconnect():
-            android_device.stop_services()
-            time.sleep(1)
-
-        self.on_reconnect = on_reconnect
-        self.on_disconnect = on_disconnect
-
-    def set_on_disconnect(self, callback):
-        """Sets the callback to be called when Monsoon disconnects USB."""
-        self.on_disconnect = callback
-
-    def set_on_reconnect(self, callback):
-        """Sets the callback to be called when Monsoon reconnects USB."""
-        self.on_reconnect = callback
-
-    def take_samples(self, assembly_line):
-        """Runs the sampling procedure based on the given assembly line."""
-        # Sampling is always done in a separate process. Release the Monsoon
-        # so the child process can sample from the Monsoon.
-        self.release_monsoon_connection()
-
-        try:
-            assembly_line.run()
-        finally:
-            self.establish_monsoon_connection()
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """Measure power consumption of the attached device.
-
-        This function is a default implementation of measuring power consumption
-        during gathering measurements. For offline methods, use take_samples()
-        with a custom AssemblyLine.
-
-        Args:
-            duration: Amount of time to measure power for. Note:
-                total_duration = duration + measure_after_seconds
-            measure_after_seconds: Number of seconds to wait before beginning
-                reading measurement.
-            hz: The number of samples to collect per second. Must be a factor
-                of 5000.
-            output_path: The location to write the gathered data to.
-            transformers: A list of Transformer objects that receive passed-in
-                          samples. Runs in order sent.
-
-        Returns:
-            A MonsoonData object with the measured power data.
-        """
-        raise NotImplementedError()
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        raise NotImplementedError()
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        raise NotImplementedError()
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        raise NotImplementedError()
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        raise NotImplementedError()
-
-    def _on_reconnect(self):
-        """Reconnects the DUT over USB.
-
-        Raises:
-            TimeoutError upon failure to reconnect over USB.
-        """
-        self._log.info('Reconnecting dut.')
-        # Wait for two seconds to ensure that the device is ready, then
-        # attempt to reconnect. If reconnect times out, reset the passthrough
-        # state and try again.
-        time.sleep(2)
-        try:
-            self.on_reconnect()
-        except TimeoutError as err:
-            self._log.info('Toggling USB and trying again. %s' % err)
-            self.usb(PassthroughStates.OFF)
-            time.sleep(1)
-            self.usb(PassthroughStates.ON)
-            self.on_reconnect()
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Makes the underlying Monsoon call to set passthrough mode."""
-        raise NotImplementedError()
-
-    def reconnect_monsoon(self):
-        """Reconnects the Monsoon Serial/USB connection."""
-        raise NotImplementedError()
-
-    def is_allocated(self):
-        """Whether the resource is locked."""
-        raise NotImplementedError()
-
-    def release_monsoon_connection(self):
-        """Releases the underlying monsoon Serial or USB connection.
-
-        Useful for allowing other processes access to the device.
-        """
-        raise NotImplementedError()
-
-    def establish_monsoon_connection(self):
-        """Establishes the underlying monsoon Serial or USB connection."""
-        raise NotImplementedError()
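The deleted BaseMonsoon clamps requested voltages into the device's valid range: anything below half of MIN_VOLTAGE maps to 0 (off), and everything else is clamped into [MIN_VOLTAGE, MAX_VOLTAGE]. A small illustration using the LVPM limits defined earlier in this change (2.01 V to 4.55 V):

def closest_valid_voltage(voltage, min_v=2.01, max_v=4.55):
    # Mirrors the deleted get_closest_valid_voltage() with LVPM limits.
    return 0 if voltage < min_v / 2 else max(min_v, min(voltage, max_v))

assert closest_valid_voltage(0.5) == 0  # below half of MIN -> treated as off
assert closest_valid_voltage(1.5) == 2.01  # raised to the device floor
assert closest_valid_voltage(5.0) == 4.55  # clamped to the practical maximum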
diff --git a/src/antlion/controllers/monsoon_lib/sampling/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/common.py b/src/antlion/controllers/monsoon_lib/sampling/common.py
deleted file mode 100644
index 7db8baf..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/common.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class UncalibratedSampleChunk(object):
-    """An uncalibrated sample collection stored with its calibration data.
-
-    These objects are created by the SampleChunker Transformer and read by
-    the CalibrationApplier Transformer.
-
-    Attributes:
-        samples: the uncalibrated samples list
-        calibration_data: the data used to calibrate the samples.
-    """
-
-    def __init__(self, samples, calibration_data):
-        self.samples = samples
-        self.calibration_data = calibration_data
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py b/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py
deleted file mode 100644
index 88cc733..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import queue
-from concurrent.futures import ThreadPoolExecutor
-import multiprocessing
-
-
-class AssemblyLine(object):
-    """A class for passing data through a chain of threads or processes,
-    assembly-line style.
-
-    Attributes:
-        nodes: A list of AssemblyLine.Nodes that pass data from one node to the
-            next.
-    """
-
-    class Node(object):
-        """A Node in an AssemblyLine.
-
-        Each node is composed of the following:
-
-         input_stream                    output_stream
-        ==============> [ transformer ] ===============>
-
-        Attributes:
-            transformer: The Transformer that takes input from the input
-                stream, transforms the data, and sends it to the output stream.
-            input_stream: The stream of data to be taken in as input to this
-                transformer. This stream is the stream to be registered as the
-                previous node's output stream.
-
-        Properties:
-            output_stream: The stream of data to be passed to the next node.
-        """
-
-        def __init__(self, transformer=None, input_stream=None):
-            self.transformer = transformer
-            self.input_stream = input_stream
-
-        @property
-        def output_stream(self):
-            return self.transformer.output_stream
-
-        @output_stream.setter
-        def output_stream(self, value):
-            self.transformer.output_stream = value
-
-    def __init__(self, nodes):
-        """Initializes an AssemblyLine class.
-
-        Args:
-            nodes: A list of AssemblyLine.Node objects.
-        """
-        self.nodes = nodes
-
-    def run(self):
-        """Runs the AssemblyLine, passing the data between each work node."""
-        raise NotImplementedError()
-
-
-class ProcessAssemblyLine(AssemblyLine):
-    """An AssemblyLine that uses processes to schedule work on nodes."""
-
-    def run(self):
-        """Runs the AssemblyLine within a process pool."""
-        if not self.nodes:
-            # If self.nodes is empty, it will create a multiprocessing.Pool of
-            # 0 nodes, which raises a ValueError.
-            return
-
-        process_pool = multiprocessing.Pool(processes=len(self.nodes))
-        for node in self.nodes:
-            process_pool.apply_async(node.transformer.transform,
-                                     [node.input_stream])
-        process_pool.close()
-        process_pool.join()
-
-
-class ThreadAssemblyLine(AssemblyLine):
-    """An AssemblyLine that uses threading to schedule work on nodes."""
-
-    def run(self):
-        """Runs the AssemblyLine within a thread pool."""
-        with ThreadPoolExecutor(max_workers=len(self.nodes)) as thread_pool:
-            for node in self.nodes:
-                thread_pool.submit(node.transformer.transform,
-                                   node.input_stream)
-
-
-class AssemblyLineBuilder(object):
-    """An abstract class that builds an AssemblyLine object.
-
-    Attributes:
-    _assembly_line_generator: The callable that creates the AssemblyLine.
-        Should be in the form of:
-
-            Args:
-                A list of AssemblyLine.Node objects.
-
-            Returns:
-                An AssemblyLine object.
-
-    _queue_generator: The callable that creates new queues to be used for
-        BufferStreams. Should be in the form of:
-
-            Args:
-                None.
-
-            Returns:
-                A Queue object.
-    """
-
-    def __init__(self, queue_generator, assembly_line_generator):
-        """Creates an AssemblyLineBuilder.
-
-        Args:
-            queue_generator: A callable of type lambda: Queue().
-            assembly_line_generator: A callable of type
-                lambda list<AssemblyLine.Node>: AssemblyLine.
-        """
-        super().__init__()
-        self._assembly_line_generator = assembly_line_generator
-        self._queue_generator = queue_generator
-
-        self.nodes = []
-        self._built = False
-
-    @property
-    def built(self):
-        return self._built
-
-    def __generate_queue(self):
-        """Returns a new Queue object for passing information between nodes."""
-        return self._queue_generator()
-
-    @property
-    def queue_generator(self):
-        """Returns the callable used for generating queues."""
-        return self._queue_generator
-
-    def source(self, transformer, input_stream=None):
-        """Adds a SourceTransformer to the AssemblyLine.
-
-        Must be the first function call on the AssemblyLineBuilder.
-
-        Args:
-            transformer: The SourceTransformer that generates data for the
-                AssemblyLine to process.
-            input_stream: The input stream to use, if necessary.
-
-        Raises:
-            ValueError if source is not the first transformer to be added to
-                the AssemblyLine, or the AssemblyLine has been built.
-        """
-        if self.nodes:
-            raise ValueError('AssemblyLines can only have a single source.')
-        if input_stream is None:
-            input_stream = DevNullBufferStream()
-        self.nodes.append(AssemblyLine.Node(transformer, input_stream))
-        return self
-
-    def into(self, transformer):
-        """Adds the given transformer next in the AssemblyLine.
-
-        Args:
-            transformer: The transformer next in the AssemblyLine.
-
-        Raises:
-            ValueError if no source node is set, or the AssemblyLine has been
-                built.
-        """
-        if not self.nodes:
-            raise ValueError('The source transformer must be set first.')
-        if self.built:
-            raise ValueError('Cannot add additional nodes after the '
-                             'AssemblyLine has been built.')
-        stream = BufferStream(self.__generate_queue())
-        self.nodes[-1].transformer.set_output_stream(stream)
-        self.nodes.append(AssemblyLine.Node(transformer, stream))
-        return self
-
-    def build(self, output_stream=None):
-        """Builds the AssemblyLine object.
-
-        Note that after this function is called this AssemblyLineBuilder cannot
-        be used again, as it is already marked as built.
-        """
-        if self.built:
-            raise ValueError('The AssemblyLine is already built.')
-        if not self.nodes:
-            raise ValueError('Cannot create an empty assembly line.')
-        self._built = True
-        if output_stream is None:
-            output_stream = DevNullBufferStream()
-        self.nodes[-1].output_stream = output_stream
-        return self._assembly_line_generator(self.nodes)
-
-
-class ThreadAssemblyLineBuilder(AssemblyLineBuilder):
-    """An AssemblyLineBuilder for generating ThreadAssemblyLines."""
-
-    def __init__(self, queue_generator=queue.Queue):
-        super().__init__(queue_generator, ThreadAssemblyLine)
-
-
-class ProcessAssemblyLineBuilder(AssemblyLineBuilder):
-    """An AssemblyLineBuilder for ProcessAssemblyLines.
-
-    Attributes:
-        manager: The multiprocessing.Manager used for having queues communicate
-            with one another over multiple processes.
-    """
-
-    def __init__(self):
-        self.manager = multiprocessing.Manager()
-        super().__init__(self.manager.Queue, ProcessAssemblyLine)
-
-
-class IndexedBuffer(object):
-    """A buffer indexed with the order it was generated in."""
-
-    def __init__(self, index, size_or_buffer):
-        """Creates an IndexedBuffer.
-
-        Args:
-            index: The integer index associated with the buffer.
-            size_or_buffer:
-                either:
-                    An integer specifying the number of slots in the buffer OR
-                    A list to be used as a buffer.
-        """
-        self.index = index
-        if isinstance(size_or_buffer, int):
-            self.buffer = [None] * size_or_buffer
-        else:
-            self.buffer = size_or_buffer
-
-
-class BufferList(list):
-    """A list of Buffers.
-
-    This type is useful for differentiating when a buffer has been returned
-    from a transformer, vs when a list of buffers has been returned from a
-    transformer.
-    """
-
-
-class BufferStream(object):
-    """An object that acts as a stream between two transformers."""
-
-    # The object passed to the buffer queue to signal the end-of-stream.
-    END = None
-
-    def __init__(self, buffer_queue):
-        """Creates a new BufferStream.
-
-        Args:
-            buffer_queue: A Queue object used to pass data along the
-                BufferStream.
-        """
-        self._buffer_queue = buffer_queue
-
-    def initialize(self):
-        """Initializes the stream.
-
-        When running BufferStreams through multiprocessing, initialize must
-        only be called on the process using the BufferStream.
-        """
-        # Here we need to make any call to the stream to initialize it. This
-        # makes read and write times for the first buffer faster, preventing
-        # the data at the beginning from being dropped.
-        self._buffer_queue.qsize()
-
-    def end_stream(self):
-        """Closes the stream.
-
-        By convention, a None object is used, mirroring file reads returning
-        an empty string when the end of file is reached.
-        """
-        self._buffer_queue.put(None, block=False)
-
-    def add_indexed_buffer(self, buffer):
-        """Adds the given buffer to the buffer stream."""
-        self._buffer_queue.put(buffer, block=False)
-
-    def remove_indexed_buffer(self):
-        """Removes an indexed buffer from the array.
-
-        This operation blocks until data is received.
-
-        Returns:
-            an IndexedBuffer.
-        """
-        return self._buffer_queue.get()
-
-
-class DevNullBufferStream(BufferStream):
-    """A BufferStream that is always empty."""
-
-    def __init__(self, *_):
-        super().__init__(None)
-
-    def initialize(self):
-        """Does nothing. Nothing to initialize."""
-
-    def end_stream(self):
-        """Does nothing. The stream always returns end-of-stream when read."""
-
-    def add_indexed_buffer(self, buffer):
-        """Imitating /dev/null, nothing will be written to the stream."""
-
-    def remove_indexed_buffer(self):
-        """Always returns the end-of-stream marker."""
-        return None
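# Self-contained sketch of the assembly-line pattern defined above: each stage
# reads buffers from an input queue, transforms them, and writes to an output
# queue, with None acting as the end-of-stream marker (BufferStream.END). The
# stage functions and queue wiring here are illustrative stand-ins.
import queue
from concurrent.futures import ThreadPoolExecutor


def produce(out_q):
    for i in range(5):
        out_q.put([i, i + 1])  # a small "buffer" of samples
    out_q.put(None)  # end-of-stream


def double(in_q, out_q):
    while True:
        buf = in_q.get()
        if buf is None:
            out_q.put(None)
            break
        out_q.put([x * 2 for x in buf])


if __name__ == '__main__':
    q1, q2 = queue.Queue(), queue.Queue()
    with ThreadPoolExecutor(max_workers=2) as pool:
        pool.submit(produce, q1)
        pool.submit(double, q1, q2)
    results = []
    while True:
        buf = q2.get()
        if buf is None:
            break
        results.append(buf)
    print(results)  # [[0, 2], [2, 4], [4, 6], [6, 8], [8, 10]]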
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py b/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py
deleted file mode 100644
index 1e531ed..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class CalibrationError(Exception):
-    """Raised when a value is requested before it is properly calibrated."""
-
-
-class CalibrationCollection(object):
-    """The interface for keeping track of calibration values.
-
-    This class is an abstract representation of a collection of Calibration
-    values. Some CalibrationCollections may simply be a dictionary that returns
-    values given to it (see CalibrationScalars). Others may accept multiple
-    values and return the average for a set rolling window (see
-    CalibrationWindow).
-
-    Whichever the implementation, this interface gives end-users a way of
-    setting and querying a collection of calibration data that comes from a
-    Monsoon device.
-    """
-
-    def add(self, channel, origin, granularity, value):
-        """Adds a value to the calibration storage.
-
-        The passed in channel, origin, and granularity arguments will be used
-        as a key to handle and store the value passed in.
-
-        Args:
-            channel: The channel this value comes from. See enums.Channel.
-            origin: The origin type for this value. See enums.Origin.
-            granularity: The granularity type for this value. See
-                enums.Granularity.
-            value: The value to set within the collection.
-        """
-        raise NotImplementedError()
-
-    def get_keys(self):
-        """Returns the list of possible keys for obtaining calibration data.
-
-        Not all possible (Channel, Origin, Granularity) combinations may be
-        available for all CalibrationCollections. It is also not guaranteed
-        that the CalibrationCollection's key set is static.
-        """
-        raise NotImplementedError()
-
-    def get(self, channel, origin, granularity):
-        """Returns the calibration value for a given key."""
-        raise NotImplementedError()
-
-
-class CalibrationWindows(CalibrationCollection):
-    """A class that holds calibration data in sliding windows.
-
-    After the window size has been filled, a calibration value is removed every
-    time a new calibration value is added.
-    """
-
-    def __init__(self, calibration_window_size=5):
-        """Creates a collection of CalibrationWindows.
-
-        Args:
-            calibration_window_size: The number of entries in the rolling
-                window to consider for calibration.
-        """
-        super().__init__()
-        self._calibrations = dict()
-        self._calibration_window_size = calibration_window_size
-
-    def add(self, channel, origin, granularity, value):
-        """Adds the given value to the given calibration window.
-
-        Args:
-            channel: The channel being calibrated.
-            origin: The origin value being calibrated.
-            granularity: The granularity level being calibrated.
-            value: The calibration value.
-        """
-        window = self._calibrations[(channel, origin, granularity)]
-        if len(window) == self._calibration_window_size:
-            window.popleft()
-        window.append(value)
-
-    def get_keys(self):
-        return self._calibrations.keys()
-
-    def get(self, channel, origin, granularity):
-        window = self._calibrations[(channel, origin, granularity)]
-        if len(window) < self._calibration_window_size:
-            raise CalibrationError('%s is not calibrated yet.' % repr(
-                (channel, origin, granularity)))
-        return sum(window) / self._calibration_window_size
-
-
-class CalibrationScalars(CalibrationCollection):
-    """A collection of calibrations where scalar values are used.
-
-    Reading scalar calibration values is faster than calculating the
-    calibration value from rolling windows.
-    """
-
-    def __init__(self):
-        self._calibrations = dict()
-
-    def get_keys(self):
-        return self._calibrations.keys()
-
-    def add(self, channel, origin, granularity, value):
-        """Adds a value to the calibration storage.
-
-        Note that if a value is already within the collection, it will be
-        overwritten, since CalibrationScalars can only hold a single value.
-
-        Args:
-            channel: The channel being calibrated.
-            origin: The origin value being calibrated.
-            granularity: The granularity level being calibrated.
-            value: The calibration value.
-        """
-        self._calibrations[(channel, origin, granularity)] = value
-
-    def get(self, channel, origin, granularity):
-        return self._calibrations[(channel, origin, granularity)]
-
-
-class CalibrationSnapshot(CalibrationScalars):
-    """A collection of calibrations taken from another CalibrationCollection.
-
-    CalibrationSnapshot calculates all of the calibration values of another
-    CalibrationCollection and creates a snapshot of those values. This allows
-    the CalibrationWindows to continue getting new values while another thread
-    processes the calibration on previously gathered values.
-    """
-
-    def __init__(self, calibration_collection):
-        """Generates a CalibrationSnapshot from another CalibrationCollection.
-
-        Args:
-            calibration_collection: The CalibrationCollection to create a
-                snapshot of.
-        """
-        super().__init__()
-
-        if not isinstance(calibration_collection, CalibrationCollection):
-            raise ValueError('Argument must inherit from '
-                             'CalibrationCollection.')
-
-        for key in calibration_collection.get_keys():
-            try:
-                # key's type is tuple(Channel, Origin, Granularity)
-                value = calibration_collection.get(*key)
-            except CalibrationError as calibration_error:
-                # If uncalibrated, store the CalibrationError and raise when a
-                # user has asked for the value.
-                value = calibration_error
-            self._calibrations[key] = value
-
-    def get(self, channel, origin, granularity):
-        """Returns the calibration value for the given key.
-
-        Raises:
-            CalibrationError if the requested key is not calibrated.
-        """
-        value = self._calibrations[(channel, origin, granularity)]
-        if isinstance(value, CalibrationError):
-            # The user requested an uncalibrated value. Raise that error.
-            raise value
-        return value
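# Self-contained sketch of the rolling-window calibration idea above: keep the
# most recent N readings per (channel, origin, granularity) key and only report
# the average once the window is full. The key used here is a hypothetical
# stand-in for (Channel.MAIN, Origin.ZERO, Granularity.FINE).
from collections import deque

WINDOW_SIZE = 5
windows = {(0, 0, 1): deque()}


def add(key, value):
    window = windows[key]
    if len(window) == WINDOW_SIZE:
        window.popleft()
    window.append(value)


def get(key):
    window = windows[key]
    if len(window) < WINDOW_SIZE:
        raise ValueError('%s is not calibrated yet.' % repr(key))
    return sum(window) / WINDOW_SIZE


for reading in (100, 102, 98, 101, 99):
    add((0, 0, 1), reading)
print(get((0, 0, 1)))  # 100.0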
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py b/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py
deleted file mode 100644
index 080a69e..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import DevNullBufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-
-
-class Transformer(object):
-    """An object that represents how to transform a given buffer into a result.
-
-    Attributes:
-        output_stream: The stream to output data to upon transformation.
-            Defaults to a DevNullBufferStream.
-    """
-
-    def __init__(self):
-        self.output_stream = DevNullBufferStream(None)
-
-    def set_output_stream(self, output_stream):
-        """Sets the Transformer's output stream to the given output stream."""
-        self.output_stream = output_stream
-
-    def transform(self, input_stream):
-        """Transforms input_stream data and passes it to self.output_stream.
-
-        Args:
-            input_stream: The BufferStream of input data this transformer should
-                transform. Note that the type of data stored within BufferStream
-                is not guaranteed to be in the format expected, much like STDIN
-                is not guaranteed to be the format a process expects. However,
-                for performance, users should expect the data to be properly
-                formatted anyway.
-        """
-        input_stream.initialize()
-        self.output_stream.initialize()
-        class_name = self.__class__.__qualname__
-        try:
-            logging.debug('%s transformer beginning.', class_name)
-            self.on_begin()
-            logging.debug('%s transformation started.', class_name)
-            self._transform(input_stream)
-        except Exception:
-            # TODO(markdr): Get multi-process error reporting to play nicer.
-            logging.exception('%s ran into an exception.', class_name)
-            raise
-        finally:
-            logging.debug('%s transformation ended.', class_name)
-            self.on_end()
-            logging.debug('%s finished.', class_name)
-
-    def _transform_buffer(self, buffer):
-        """Transforms a given buffer.
-
-        The implementation can either:
-
-        1) Return the transformed buffer. Can be either in-place or a new
-           buffer.
-
-        2) Return a BufferList: a list of transformed buffers. This is useful
-           for grouping data together for faster operations.
-
-        Args:
-            buffer: The buffer to transform
-
-        Returns:
-            either a buffer or a BufferList. See detailed documentation.
-        """
-        raise NotImplementedError()
-
-    def _on_end_of_stream(self, input_stream):
-        """To be called when the input stream has sent the end of stream signal.
-
-        This is particularly useful for flushing any stored memory into the
-        output stream.
-
-        Args:
-            input_stream: the stream that was closed.
-        """
-        # By default, this function closes the output stream.
-        self.output_stream.end_stream()
-
-    def _transform(self, input_stream):
-        """Should call _transform_buffer within this function."""
-        raise NotImplementedError()
-
-    def on_begin(self):
-        """A function called before the transform loop begins."""
-
-    def on_end(self):
-        """A function called after the transform loop has ended."""
-
-
-class SourceTransformer(Transformer):
-    """The base class for generating data in an AssemblyLine.
-
-    Note that any Transformer will be able to generate data, but this class is
-    a generic way to send data.
-
-    Attributes:
-        _buffer_size: The buffer size for each IndexedBuffer sent over the
-            output stream.
-    """
-
-    def __init__(self):
-        super().__init__()
-        # Defaults to 64, which is small enough to be passed within the 0.6 ms
-        # window, but large enough that it does not spam the queue.
-        self._buffer_size = 64
-
-    def _transform(self, _):
-        """Generates data and sends it to the output stream."""
-        buffer_index = 0
-        while True:
-            indexed_buffer = IndexedBuffer(buffer_index, self._buffer_size)
-            buffer = self._transform_buffer(indexed_buffer.buffer)
-            if buffer is BufferStream.END:
-                break
-            indexed_buffer.buffer = buffer
-            self.output_stream.add_indexed_buffer(indexed_buffer)
-            buffer_index += 1
-
-        self.output_stream.end_stream()
-
-    def _transform_buffer(self, buffer):
-        """Fills the passed-in buffer with data."""
-        raise NotImplementedError()
-
-
-class SequentialTransformer(Transformer):
-    """A transformer that receives input in sequential order.
-
-    Attributes:
-        _next_index: The index of the next IndexedBuffer that should be read.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._next_index = 0
-
-    def _transform(self, input_stream):
-        while True:
-            indexed_buffer = input_stream.remove_indexed_buffer()
-            if indexed_buffer is BufferStream.END:
-                break
-            buffer_or_buffers = self._transform_buffer(indexed_buffer.buffer)
-            if buffer_or_buffers is not None:
-                self._send_buffers(buffer_or_buffers)
-
-        self._on_end_of_stream(input_stream)
-
-    def _send_buffers(self, buffer_or_buffer_list):
-        """Sends buffers over to the output_stream.
-
-        Args:
-            buffer_or_buffer_list: A BufferList or buffer object. Note that if
-                buffer is None, it is effectively an end-of-stream signal.
-        """
-        if not isinstance(buffer_or_buffer_list, BufferList):
-            # Assume a single buffer was returned
-            buffer_or_buffer_list = BufferList([buffer_or_buffer_list])
-
-        buffer_list = buffer_or_buffer_list
-        for buffer in buffer_list:
-            new_buffer = IndexedBuffer(self._next_index, buffer)
-            self.output_stream.add_indexed_buffer(new_buffer)
-            self._next_index += 1
-
-    def _transform_buffer(self, buffer):
-        raise NotImplementedError()
-
-
-class ParallelTransformer(Transformer):
-    """A Transformer that is capable of running in parallel.
-
-    Buffers received may be unordered. For ordered input, use
-    SequentialTransformer.
-    """
-
-    def _transform(self, input_stream):
-        while True:
-            indexed_buffer = input_stream.remove_indexed_buffer()
-            if indexed_buffer is None:
-                break
-            buffer = self._transform_buffer(indexed_buffer.buffer)
-            indexed_buffer.buffer = buffer
-            self.output_stream.add_indexed_buffer(indexed_buffer)
-
-        self._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Transforms a given buffer.
-
-        Note that ParallelTransformers can NOT return a BufferList. This is a
-        limitation with the current indexing system. If the input buffer is
-        replaced with multiple buffers, later transformers will not know what
-        the proper order of buffers is.
-
-        Args:
-            buffer: The buffer to transform
-
-        Returns:
-            either None or a buffer. See detailed documentation.
-        """
-        raise NotImplementedError()
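# Sketch of the SourceTransformer contract described above: _transform_buffer
# either fills and returns the provided buffer, or returns the end-of-stream
# marker (None, i.e. BufferStream.END) to stop the generation loop. FiniteSource
# is a stand-in class so the snippet runs without the removed module.
class FiniteSource:
    """Stand-in for a SourceTransformer emitting a fixed number of buffers."""

    def __init__(self, buffers_to_emit, buffer_size=4):
        self._remaining = buffers_to_emit
        self._buffer_size = buffer_size

    def _transform_buffer(self, buffer):
        if self._remaining == 0:
            return None  # BufferStream.END
        self._remaining -= 1
        return [self._remaining] * len(buffer)


source = FiniteSource(buffers_to_emit=3)
emitted = []
while True:
    buffer = source._transform_buffer([None] * 4)
    if buffer is None:
        break
    emitted.append(buffer)
print(emitted)  # [[2, 2, 2, 2], [1, 1, 1, 1], [0, 0, 0, 0]]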
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py b/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py
deleted file mode 100644
index a9bdee1..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py
+++ /dev/null
@@ -1,246 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import numpy as np
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-
-
-class Tee(SequentialTransformer):
-    """Outputs main_current values to the specified file.
-
-    Attributes:
-        _filename: the name of the file to open.
-        _fd: the filestream written to.
-    """
-
-    def __init__(self, filename, measure_after_seconds=0):
-        """Creates an OutputStream.
-
-        Args:
-            filename: the path to the file to write the collected data to.
-            measure_after_seconds: the number of seconds to skip before
-                logging data as part of the measurement.
-        """
-        super().__init__()
-        self._filename = filename
-        self._fd = None
-        self.measure_after_seconds = measure_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def on_begin(self):
-        self._fd = open(self._filename, 'w+')
-
-    def on_end(self):
-        self._fd.close()
-
-    def _transform_buffer(self, buffer):
-        """Writes the reading values to a file.
-
-        Args:
-            buffer: A list of HvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if (sample.sample_time - self._start_time <
-                    self.measure_after_seconds):
-                continue
-            self._fd.write('%0.9f %.12f\n' %
-                           (sample.sample_time, sample.main_current))
-        self._fd.flush()
-        return BufferList([buffer])
-
-
-class PerfgateTee(SequentialTransformer):
-    """Outputs records of nanoseconds,current,voltage to the specified file.
-
-    Similar to Tee, but this version includes voltage, which may help with
-    accuracy in the power calculations.
-
-    This output type can be enabled by passing this transformer to the
-    transformers kwarg in Monsoon.measure_power():
-
-    # Uses the default Tee
-    > monsoon.measure_power(..., output_path=filename)
-
-    # Uses PerfgateTee
-    > monsoon.measure_power(..., transformers=[PerfgateTee(filename)])
-
-    Attributes:
-        _filename: the name of the file to open.
-        _fd: the filestream written to.
-    """
-
-    def __init__(self, filename, measure_after_seconds=0):
-        """Creates an OutputStream.
-
-        Args:
-            filename: the path to the file to write the collected data to.
-            measure_after_seconds: the number of seconds to skip before logging
-              data as part of the measurement.
-        """
-        super().__init__()
-        self._filename = filename
-        self._fd = None
-        self.measure_after_seconds = measure_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def on_begin(self):
-        self._fd = open(self._filename, 'w+')
-
-    def on_end(self):
-        self._fd.close()
-
-    def _transform_buffer(self, buffer):
-        """Writes the reading values to a file.
-
-        Args:
-            buffer: A list of HvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if (sample.sample_time - self._start_time <
-                    self.measure_after_seconds):
-                continue
-            self._fd.write(
-                '%i,%.6f,%.6f\n' %
-                (sample.sample_time * 1e9, sample.main_current,
-                 sample.main_voltage))
-        self._fd.flush()
-        return BufferList([buffer])
-
-
-class SampleAggregator(ParallelTransformer):
-    """Aggregates the main current value and the number of samples gathered."""
-
-    def __init__(self, start_after_seconds=0):
-        """Creates a new SampleAggregator.
-
-        Args:
-            start_after_seconds: The number of seconds to wait before gathering
-                data. Useful for allowing the device to settle after USB
-                disconnect.
-        """
-        super().__init__()
-        self._num_samples = 0
-        self._sum_currents = 0
-        self.start_after_seconds = start_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def _transform_buffer(self, buffer):
-        """Aggregates the sample data.
-
-        Args:
-            buffer: A buffer of H/LvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if sample.sample_time - self._start_time < self.start_after_seconds:
-                continue
-            self._num_samples += 1
-            self._sum_currents += sample.main_current
-        return buffer
-
-    @property
-    def num_samples(self):
-        """The number of samples read from the device."""
-        return self._num_samples
-
-    @property
-    def sum_currents(self):
-        """The total sum of current values gathered so far."""
-        return self._sum_currents
-
-
-class DownSampler(SequentialTransformer):
-    """Takes in sample outputs and returns a downsampled version of that data.
-
-    Note: for speed, the downsampling factor must be a perfect integer divisor
-    of the Monsoon's sample rate (5000 Hz).
-    """
-    _MONSOON_SAMPLE_RATE = 5000
-
-    def __init__(self, downsample_factor):
-        """Creates a DownSampler Transformer.
-
-        Args:
-            downsample_factor: The number of samples averaged together for a
-                single output sample.
-        """
-        super().__init__()
-
-        self._mean_width = int(downsample_factor)
-        self._leftovers = []
-
-    def _transform_buffer(self, buffer):
-        """Returns the buffer downsampled by an integer factor.
-
-        The algorithm splits data points into three categories:
-
-            leftovers: The tail samples carried over from the previous
-                       _transform_buffer call, prepended to this buffer.
-            tail: The remaining samples where not enough were collected to
-                  reach the integer factor for downsampling. The tail is stored
-                  in self._leftovers between _transform_buffer calls.
-            tailless_buffer: The samples excluding the tail that can be
-                             downsampled directly.
-
-        Below is a diagram explaining the buffer math:
-
-        input:          input buffer n              input buffer n + 1
-                 ╔══════════════════════════╗  ╔══════════════════════════╗
-             ... ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗ ║  ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗ ║ ...
-                 ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝ ║  ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝ ║
-                 ╚══════════════════════════╝  ╚══════════════════════════╝
-                               ▼                             ▼
-        alg:     ╔═════════════════════╦════╗  ╔═════════════════════╦════╗
-                 ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗║╔╗╔╗║  ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗║╔╗╔╗║
-                 ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝║╚╝╚╝║  ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝║╚╝╚╝║
-             ... ║   tailless_buffer   ║tail║  ║   tailless_buffer   ║tail║ ...
-                 ╚═════════════════════╩════╝  ╚═════════════════════╩════╝
-               ──┬───┘ └─┬─┘ ...  └─┬─┘ └────┬─────┘ └─┬─┘ ...  └─┬─┘ └──┬───
-                 ╔╗      ╔╗ ╔╗  ╔╗ ╔╗        ╔╗        ╔╗ ╔╗  ╔╗ ╔╗      ╔╗
-                 ╚╝      ╚╝ ╚╝  ╚╝ ╚╝        ╚╝        ╚╝ ╚╝  ╚╝ ╚╝      ╚╝
-                 └─────────┬────────┘        └──────────┬─────────┘
-                           ▼                            ▼
-        output:   ╔════════════════╗           ╔════════════════╗
-                  ║ ╔╗ ╔╗ ╔╗ ╔╗ ╔╗ ║           ║ ╔╗ ╔╗ ╔╗ ╔╗ ╔╗ ║
-                  ║ ╚╝ ╚╝ ╚╝ ╚╝ ╚╝ ║           ║ ╚╝ ╚╝ ╚╝ ╚╝ ╚╝ ║
-                  ╚════════════════╝           ╚════════════════╝
-                   output buffer n             output buffer n + 1
-        """
-        tail_length = int(
-            (len(buffer) + len(self._leftovers)) % self._mean_width)
-
-        tailless_buffer = np.array(buffer[:len(buffer) - tail_length])
-
-        sample_count = len(tailless_buffer) + len(self._leftovers)
-
-        downsampled_values = np.mean(
-            np.resize(
-                np.append(self._leftovers, tailless_buffer),
-                (sample_count // self._mean_width, self._mean_width)),
-            axis=1)
-
-        self._leftovers = buffer[len(buffer) - tail_length:]
-
-        return downsampled_values
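# Numeric sketch of the DownSampler math above: carry leftover samples between
# calls, then average complete groups of `factor` samples with numpy. The
# function below mirrors _transform_buffer with module-level state for brevity.
import numpy as np

factor = 4
leftovers = []


def downsample(buffer):
    global leftovers
    tail_length = (len(buffer) + len(leftovers)) % factor
    tailless = np.array(buffer[:len(buffer) - tail_length])
    count = len(tailless) + len(leftovers)
    means = np.mean(
        np.resize(np.append(leftovers, tailless), (count // factor, factor)),
        axis=1)
    leftovers = buffer[len(buffer) - tail_length:]
    return means


print(downsample([1, 2, 3, 4, 5, 6]))  # [2.5]  (leftovers: [5, 6])
print(downsample([7, 8, 9, 10]))       # [6.5]  (leftovers: [9, 10])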
diff --git a/src/antlion/controllers/monsoon_lib/sampling/enums.py b/src/antlion/controllers/monsoon_lib/sampling/enums.py
deleted file mode 100644
index 5fc30c9..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/enums.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class Origin:
-    """The origin types of a given measurement or calibration.
-
-    The Monsoon returns calibration packets for three types of origin:
-
-        ZERO: The calibrated zeroing point.
-        REFERENCE: The reference point used for the returned samples.
-        SCALE: The factor at which to scale the returned samples to get power
-               consumption data.
-    """
-    ZERO = 0
-    REFERENCE = 1
-    SCALE = 2
-
-    values = [ZERO, REFERENCE, SCALE]
-
-
-class Granularity:
-    """The granularity types.
-
-    Monsoon leverages two different granularities when returning power
-    measurements. If the power usage exceeds the threshold of the fine
-    measurement region, a coarse measurement will be used instead.
-
-    This also means that there need to be two calibration values: one for coarse
-    and one for fine.
-    """
-    COARSE = 0
-    FINE = 1
-
-    values = [COARSE, FINE]
-
-
-class Reading:
-    """The extraneous possible reading types.
-
-    Aside from coarse and fine readings (see Granularity), some Monsoons can
-    gather readings on the voltage and gain control.
-    """
-    VOLTAGE = 0x4
-    GAIN = 0x6
-
-    values = [VOLTAGE, GAIN]
-
-
-class Channel:
-    """The possible channel types.
-
-    Monsoons can read power measurements from the following three inputs.
-    Calibration and reading values may also be available on these channels.
-    """
-    MAIN = 0
-    USB = 1
-    AUX = 2
-
-    values = [MAIN, USB, AUX]
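# Sketch of how these enum-like value lists combine into calibration keys,
# mirroring the itertools.product usage in the HVPM calibration classes below.
# Dynamic calibration only tracks the ZERO and REFERENCE origins.
import itertools

channel_values = [0, 1, 2]   # Channel.MAIN, Channel.USB, Channel.AUX
origin_values = [0, 1]       # Origin.ZERO, Origin.REFERENCE
granularity_values = [0, 1]  # Granularity.COARSE, Granularity.FINE

keys = list(itertools.product(channel_values, origin_values, granularity_values))
print(len(keys))  # 12 calibration windows, one per (channel, origin, granularity)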
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py
deleted file mode 100644
index d07d404..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationScalars
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import SampleType
-
-
-class HvpmCalibrationData(CalibrationWindows):
-    """An object that holds the Dynamic Calibration values for HVPM Sampling."""
-
-    def __init__(self, calibration_window_size=5):
-        super().__init__(calibration_window_size)
-
-        all_variable_sets = [
-            Channel.values,
-            (Origin.REFERENCE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            self._calibrations[key] = deque()
-
-    def add_calibration_sample(self, sample):
-        """Adds calibration values from a calibration sample.
-
-        The packet is formatted the following way:
-            [0]: MAIN, COARSE
-            [1]: MAIN, FINE
-            [2]: USB,  COARSE
-            [3]: USB,  FINE
-            [4]: AUX,  COARSE
-            [5]: AUX,  FINE
-            [...]: ?
-            [8]: 0x10 == Origin.ZERO
-                 0x30 == Origin.REFERENCE
-        """
-        sample_type = sample.get_sample_type()
-        if sample_type == SampleType.ZERO_CAL:
-            origin = Origin.ZERO
-        elif sample_type == SampleType.REF_CAL:
-            origin = Origin.REFERENCE
-        else:
-            raise ValueError(
-                'Packet of type %s is not a calibration packet.' % sample_type)
-
-        for i in range(6):
-            # Reads the last bit to get the Granularity value.
-            granularity = i & 0x01
-            # Divides by 2 to get the Channel value.
-            channel = i >> 1
-            self.add(channel, origin, granularity,
-                     sample[channel, granularity])
-
-
-class HvpmCalibrationConstants(CalibrationScalars):
-    """Tracks the calibration values gathered from the Monsoon status packet."""
-
-    def __init__(self, monsoon_status_packet):
-        """Initializes the calibration constants."""
-        super().__init__()
-
-        # Invalid combinations:
-        #   *,   REFERENCE, *
-        #   AUX, ZERO,      *
-        all_variable_sets = [
-            Channel.values,
-            (Origin.SCALE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            if key[0] == Channel.AUX and key[1] == Origin.ZERO:
-                # Monsoon status packets do not contain AUX, ZERO readings.
-                # Monsoon defaults these values to 0:
-                self._calibrations[key] = 0
-            else:
-                self._calibrations[key] = getattr(
-                    monsoon_status_packet,
-                    build_status_packet_attribute_name(*key))
-
-
-# TODO(markdr): Potentially find a better home for this function.
-def build_status_packet_attribute_name(channel, origin, granularity):
-    """Creates the status packet attribute name from the given keys.
-
-    The HVPM Monsoon status packet returns values in the following format:
-
-        <channel><Granularity><Origin>
-
-    Note that the following combinations are invalid:
-        <channel><Granularity>Reference
-        aux<Granularity>ZeroOffset
-
-    Args:
-        channel: the Channel value of the attribute
-        origin: the Origin value of the attribute
-        granularity: the Granularity value of the attribute
-
-    Returns:
-        A string that corresponds to the attribute of the Monsoon status packet.
-    """
-    if channel == Channel.MAIN:
-        channel = 'main'
-    elif channel == Channel.USB:
-        channel = 'usb'
-    elif channel == Channel.AUX:
-        channel = 'aux'
-    else:
-        raise ValueError('Unknown channel "%s".' % channel)
-
-    if granularity == Granularity.COARSE:
-        granularity = 'Coarse'
-    elif granularity == Granularity.FINE:
-        granularity = 'Fine'
-    else:
-        raise ValueError('Invalid granularity "%s"' % granularity)
-
-    if origin == Origin.SCALE:
-        origin = 'Scale'
-    elif origin == Origin.ZERO:
-        origin = 'ZeroOffset'
-    else:
-        # Note: Origin.REFERENCE is not valid for monsoon_status_packet
-        # attribute names.
-        raise ValueError('Invalid origin "%s"' % origin)
-
-    return '%s%s%s' % (channel, granularity, origin)
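# Usage sketch for build_status_packet_attribute_name above, assuming a
# checkout from before this removal so the modules are still importable. The
# returned strings are the attribute names read off the HVPM status packet.
from antlion.controllers.monsoon_lib.sampling.enums import Channel
from antlion.controllers.monsoon_lib.sampling.enums import Granularity
from antlion.controllers.monsoon_lib.sampling.enums import Origin
from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import build_status_packet_attribute_name

assert build_status_packet_attribute_name(
    Channel.MAIN, Origin.SCALE, Granularity.COARSE) == 'mainCoarseScale'
assert build_status_packet_attribute_name(
    Channel.USB, Origin.ZERO, Granularity.FINE) == 'usbFineZeroOffset'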
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py
deleted file mode 100644
index 8951400..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import struct
-
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-
-
-class SampleType:
-    """An enum-like class that defines the SampleTypes for LVPM data.
-
-    Note that these values differ from the LVPM values.
-    """
-
-    # A measurement sample.
-    MEASUREMENT = 0x00
-
-    # A zero calibration sample.
-    ZERO_CAL = 0x10
-
-    # A reference calibration sample.
-    REF_CAL = 0x30
-
-    @staticmethod
-    def is_calibration(value):
-        """Returns true iff the SampleType is a type of calibration."""
-        return bool(value & 0x10)
-
-
-class HvpmMeasurement(object):
-    """An object that represents a single measurement from the HVPM device.
-
-    Attributes:
-        _sample_time: The time the sample was taken.
-        values: From the Monsoon API doc, the values are as follows:
-
-    Val │  Byte  │  Type  | Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │    0   │ uint16 │  Main   │ Coarse  │ Calibration/Measurement value
-     1  │    2   │ uint16 │  Main   │ Fine    │ Calibration/Measurement value
-     2  │    4   │ uint16 │  USB    │ Coarse  │ Calibration/Measurement value
-     3  │    6   │ uint16 │  USB    │ Fine    │ Calibration/Measurement value
-     4  │    8   │ uint16 │  Aux    │ Coarse  │ Calibration/Measurement value
-     5  │   10   │ uint16 │  Aux    │ Fine    │ Calibration/Measurement value
-     6  │   12   │ uint16 │  Main   │ Voltage │ Main V measurement, or Aux V
-        │        │        │         │         │    if setVoltageChannel == 1
-     7  │   14   │ uint16 │  USB    │ Voltage │ USB Voltage
-    ╔══════════════════════════════════════════════════════════════════════╗
-    ║ Note: The Monsoon API Doc puts the below values in the wrong order.  ║
-    ║       The values in this docstring are in the correct order.         ║
-    ╚══════════════════════════════════════════════════════════════════════╝
-     8  │   16   │ uint8? │  USB    │ Gain    │ Measurement gain control.
-        │        │        │         │         │  * Structure Unknown. May be
-        │        │        │         │         │    similar to Main Gain.
-     9  │   17   │ uint8  │  Main   │ Gain    │ Measurement gain control.
-        │        │        │         │         │  * b0-3: Believed to be gain.
-        │        │        │         │         │  * b4-5: SampleType.
-        │        │        │         │         │  * b6-7: Unknown.
-
-    """
-
-    # The total number of bytes in a measurement. See the table above.
-    SIZE = 18
-
-    def __init__(self, raw_data, sample_time):
-        self.values = struct.unpack('>8H2B', raw_data)
-        self._sample_time = sample_time
-
-    def __getitem__(self, channel_and_reading_granularity):
-        """Returns the requested reading for the given channel.
-
-        See HvpmMeasurement.__doc__ for a reference table.
-
-        Args:
-            channel_and_reading_granularity: A tuple of (channel,
-                reading_or_granularity).
-        """
-        channel = channel_and_reading_granularity[0]
-        reading_or_granularity = channel_and_reading_granularity[1]
-
-        data_index = self.get_index(channel, reading_or_granularity)
-
-        if reading_or_granularity == Reading.GAIN:
-            # The format of this value is undocumented by Monsoon Inc.
-            # Assume an unsigned 4-bit integer is used.
-            return self.values[data_index] & 0x0F
-        return self.values[data_index]
-
-    @staticmethod
-    def get_index(channel, reading_or_granularity):
-        """Returns the values array index that corresponds with the given query.
-
-        See HvpmMeasurement.__doc__ for details on how this is determined.
-
-        Args:
-            channel: The channel to read data from.
-            reading_or_granularity: The reading or granularity desired.
-
-        Returns:
-            An index corresponding to the data's location in self.values
-        """
-        if reading_or_granularity == Reading.VOLTAGE:
-            return 6 + channel
-        if reading_or_granularity == Reading.GAIN:
-            return 9 - channel
-        # reading_or_granularity is a granularity value.
-        return channel * 2 + reading_or_granularity
-
-    def get_sample_time(self):
-        """Returns the calculated time for the given sample."""
-        return self._sample_time
-
-    def get_sample_type(self):
-        """Returns a value contained in SampleType."""
-        return self.values[9] & 0x30
-
-
-class Packet(object):
-    """A packet collected directly from serial.read() during sample collection.
-
-    Large amounts of documentation here are pulled directly from
-    http://msoon.github.io/powermonitor/Python_Implementation/docs/API.pdf
-
-    For convenience, here is the table of values stored:
-
-    Offset │ Format │ Field            │ Description
-    ───────┼────────┼──────────────────┼────────────────────────────────────────
-       0   │ uint16 │ dropped_count    │ Number of dropped packets
-       2   │  bits  │ flags            │ Flag values. see self.flags property
-       3   │ uint8  │ num_measurements │ Number of measurements in this packet
-       4   │ byte[] │ measurement[0]   │ Measurement. See HvpmMeasurement class
-      22   │ byte[] │ measurement[1]   │ Optional Measurement. See above
-      44   │ byte[] │ measurement[2]   │ Optional Measurement. See above
-
-    Note that all values except dropped_count are stored in big-endian
-    format.
-
-    Attributes:
-        _packet_data: The raw data received from the packet.
-        time_of_read: The unix timestamp this packet was collected at.
-        time_since_last_sample: The differential between this packet's
-            time_of_read and the previous packet's.
-    """
-
-    FIRST_MEASUREMENT_OFFSET = 8
-
-    # The maximum size of a packet read from USB.
-    # Note: each HVPM Packet can hold a maximum of 3 measurements.
-    MAX_PACKET_SIZE = FIRST_MEASUREMENT_OFFSET + HvpmMeasurement.SIZE * 3
-
-    def __init__(self, sampled_bytes):
-        self._packet_data = sampled_bytes
-
-        num_data_bytes = (len(sampled_bytes) - Packet.FIRST_MEASUREMENT_OFFSET)
-        self.num_measurements = num_data_bytes // HvpmMeasurement.SIZE
-
-        struct_string = (
-            '<2dhBx' +
-            (str(HvpmMeasurement.SIZE) + 's') * self.num_measurements)
-
-        # yapf: disable. Yapf forces these to try to fit one after the other.
-        (self.time_of_read,
-         self.time_since_last_sample,
-         self.dropped_count,
-         self.flags,
-         *samples) = struct.unpack(struct_string, sampled_bytes)
-        # yapf: enable
-
-        self.measurements = [None] * self.num_measurements
-
-        for i, raw_data in enumerate(samples):
-            self.measurements[i] = HvpmMeasurement(raw_data,
-                                                   self._get_sample_time(i))
-
-    def _get_sample_time(self, index):
-        """Returns the time the sample at the given index was received.
-
-        If multiple samples were captured within the same reading, the samples
-        are assumed to be uniformly distributed during the time it took to
-        sample the values.
-        """
-        time_per_sample = self.time_since_last_sample / self.num_measurements
-        return time_per_sample * (index + 1) + self.time_of_read
-
-    @property
-    def packet_counter(self):
-        """The 4-bit packet index."""
-        return self.flags & 0x0F
-
-    def get_bytes(self):
-        return list(self._packet_data)
-
-    def __getitem__(self, index):
-        return self.measurements[index]
-
-    def __len__(self):
-        return self.num_measurements
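# Quick check of the HvpmMeasurement.get_index mapping documented above:
# voltage readings occupy indexes 6-7, gain 9/8, and the coarse/fine pairs sit
# at channel * 2 + granularity. Raw constants mirror the enums module (MAIN=0,
# USB=1, AUX=2; COARSE=0, FINE=1; VOLTAGE=0x4, GAIN=0x6).
def get_index(channel, reading_or_granularity):
    if reading_or_granularity == 0x4:  # Reading.VOLTAGE
        return 6 + channel
    if reading_or_granularity == 0x6:  # Reading.GAIN
        return 9 - channel
    return channel * 2 + reading_or_granularity


assert get_index(0, 0) == 0    # Main, Coarse
assert get_index(1, 1) == 3    # USB, Fine
assert get_index(0, 0x4) == 6  # Main voltage
assert get_index(1, 0x6) == 8  # USB gain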
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py
deleted file mode 100644
index 775c309..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py
+++ /dev/null
@@ -1,476 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import array
-import logging
-import struct
-import time
-
-import numpy as np
-from Monsoon import HVPM
-
-from antlion.controllers.monsoon_lib.sampling.common import UncalibratedSampleChunk
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import HvpmCalibrationConstants
-from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import HvpmCalibrationData
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import HvpmMeasurement
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import Packet
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import SampleType
-
-
-class HvpmTransformer(Transformer):
-    """Gathers samples from the Monsoon and brings them back to the caller."""
-
-    def __init__(self, monsoon_serial, duration):
-        super().__init__()
-        self.monsoon_serial = monsoon_serial
-        self.duration = duration
-
-    def _transform(self, input_stream):
-        # We need to gather the status packet before sampling so we can use the
-        # static calibration during sample normalization.
-        monsoon = HVPM.Monsoon()
-        monsoon.setup_usb(self.monsoon_serial)
-        monsoon.fillStatusPacket()
-        monsoon_status_packet = monsoon.statusPacket()
-        monsoon.closeDevice()
-
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ProcessAssemblyLineBuilder()
-         .source(PacketCollector(self.monsoon_serial, self.duration))
-         .into(SampleNormalizer(monsoon_status_packet=monsoon_status_packet))
-         .build(output_stream=self.output_stream).run())
-        # yapf: enable
-
-
-class PacketCollector(SourceTransformer):
-    """Collects Monsoon packets into a buffer to be sent to another transformer.
-
-    Ideally, the other transformer will be in a separate process to prevent the
-    GIL from slowing down packet collection.
-
-    Attributes:
-        _monsoon_id: The id of the monsoon.
-        _monsoon: The monsoon instance. This is left unset until
-                  _initialize_monsoon() is called.
-    """
-
-    def __init__(self, monsoon_id, sampling_duration=None):
-        super().__init__()
-        self._monsoon_id = monsoon_id
-        self._monsoon = None
-        self.start_time = None
-        self.array = array.array('B', b'\x00' * Packet.MAX_PACKET_SIZE)
-        self.sampling_duration = sampling_duration
-
-    def _initialize_monsoon(self):
-        """Initializes the monsoon object.
-
-        Note that this must be done after the Transformer has started.
-        Otherwise, this transformer would hold C-level objects, preventing
-        the transformer from being used with the multiprocessing libraries.
-        """
-        self._monsoon = HVPM.Monsoon()
-        self._monsoon.setup_usb(self._monsoon_id)
-        self._monsoon.stopSampling()
-        self._monsoon.fillStatusPacket()
-        self._monsoon.StartSampling()
-
-    def on_begin(self):
-        if __debug__:
-            logging.warning(
-                'Debug mode is enabled. Expect a higher frequency of dropped '
-                'packets. To reduce packet drop, disable your python debugger.'
-            )
-
-        self.start_time = time.time()
-        self._initialize_monsoon()
-
-    def __del__(self):
-        if self._monsoon:
-            self.on_end()
-
-    def on_end(self):
-        self._monsoon.stopSampling()
-        self._monsoon.closeDevice()
-
-    def _transform_buffer(self, buffer):
-        """Fills the buffer with packets until time has been reached.
-
-        Returns:
-            A BufferList of a single buffer if collection is not yet finished.
-            None if sampling is complete.
-        """
-        if (self.sampling_duration
-                and self.sampling_duration < time.time() - self.start_time):
-            return None
-
-        for index in range(len(buffer)):
-            time_before_read = time.time()
-            try:
-                data = self._monsoon.Protocol.DEVICE.read(
-                    # Magic value for USB bulk reads.
-                    0x81,
-                    Packet.MAX_PACKET_SIZE,
-                    # In milliseconds.
-                    timeout=1000)
-            except Exception as e:
-                logging.warning(e)
-                continue
-            time_after_read = time.time()
-            time_data = struct.pack('dd', time_after_read,
-                                    time_after_read - time_before_read)
-            buffer[index] = time_data + data.tobytes()
-
-        return buffer
-
-
-class SampleNormalizer(Transformer):
-    """A Transformer that applies calibration to the input's packets."""
-
-    def __init__(self, monsoon_status_packet):
-        """Creates a SampleNormalizer.
-
-        Args:
-            monsoon_status_packet: The status of the monsoon. Used for gathering
-                the constant calibration data from the device.
-        """
-        super().__init__()
-        self.monsoon_status_packet = monsoon_status_packet
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ThreadAssemblyLineBuilder()
-         .source(PacketReader(), input_stream=input_stream)
-         .into(SampleChunker())
-         .into(CalibrationApplier(self.monsoon_status_packet))
-         .build(output_stream=self.output_stream).run())
-        # yapf: enable
-
-
-class PacketReader(ParallelTransformer):
-    """Reads raw HVPM Monsoon data and converts it into Packet objects.
-
-    Attributes:
-        rollover_count: The number of times the dropped_count value has rolled
-            over its maximum value (2^16-1).
-        previous_dropped_count: The dropped count read from the last packet.
-            Used for determining the true number of dropped samples.
-        start_time: The time of the first packet ever read.
-    """
-    """The number of seconds before considering dropped_count to be meaningful.
-
-    Monsoon devices will often report 2^16-1 as the dropped count when first
-    starting the monsoon. This usually goes away within a few milliseconds.
-    """
-    DROP_COUNT_TIMER_THRESHOLD = 1
-
-    def __init__(self):
-        super().__init__()
-        self.rollover_count = 0
-        self.previous_dropped_count = 0
-        self.start_time = 0
-
-    def _transform_buffer(self, buffer):
-        """Reads raw sample data and converts it into packet objects."""
-
-        for i in range(len(buffer)):
-            buffer[i] = Packet(buffer[i])
-
-            if buffer and not self.start_time and i == 0:
-                self.start_time = buffer[0].time_of_read
-
-            if (buffer[i].time_of_read - self.start_time >
-                    PacketReader.DROP_COUNT_TIMER_THRESHOLD):
-                self._process_dropped_count(buffer[i])
-
-        return buffer
-
-    def _process_dropped_count(self, packet):
-        """Processes the dropped count value, updating the internal counters."""
-        if packet.dropped_count == self.previous_dropped_count:
-            return
-
-        if packet.dropped_count < self.previous_dropped_count:
-            self.rollover_count += 1
-
-        self.previous_dropped_count = packet.dropped_count
-        log_function = logging.info if __debug__ else logging.warning
-        log_function('At %9f, total dropped count: %s' %
-                     (packet.time_of_read, self.total_dropped_count))
-
-    @property
-    def total_dropped_count(self):
-        """Returns the total dropped count, accounting for rollovers."""
-        return self.rollover_count * 2**16 + self.previous_dropped_count
-
-    def on_begin(self):
-        if __debug__:
-            logging.info(
-                'The python debugger is enabled. Expect results to '
-                'take longer to process after collection is complete.')
-
-    def on_end(self):
-        if self.previous_dropped_count > 0:
-            if __debug__:
-                logging.info(
-                    'During collection, a total of %d packets were '
-                    'dropped. To reduce this amount, run your test '
-                    'without debug mode enabled.' % self.total_dropped_count)
-            else:
-                logging.warning(
-                    'During collection, a total of %d packets were '
-                    'dropped.' % self.total_dropped_count)
-
-
-class SampleChunker(SequentialTransformer):
-    """Chunks input packets into lists of samples with identical calibration.
-
-    This step helps to quickly apply calibration across many samples at once.
-
-    Attributes:
-        _stored_raw_samples: The queue of raw samples that have yet to be
-            split into a new calibration group.
-        calibration_data: The calibration window information.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._stored_raw_samples = []
-        self.calibration_data = HvpmCalibrationData()
-
-    def _on_end_of_stream(self, input_stream):
-        self._send_buffers(BufferList([self._cut_new_buffer()]))
-        super()._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Takes in data from the buffer and splits it based on calibration.
-
-        This transformer is meant to come after the PacketReader.
-
-        Args:
-            buffer: A list of Packet objects.
-
-        Returns:
-            A BufferList containing 0 or more UncalibratedSampleChunk objects.
-        """
-        buffer_list = BufferList()
-        for packet in buffer:
-            for sample in packet:
-                sample_type = sample.get_sample_type()
-
-                if sample_type == SampleType.MEASUREMENT:
-                    self._stored_raw_samples.append(sample)
-                elif SampleType.is_calibration(sample_type):
-                    if len(self._stored_raw_samples) > 0:
-                        buffer_list.append(self._cut_new_buffer())
-                    self.calibration_data.add_calibration_sample(sample)
-                else:
-                    # There's no information on what this packet means within
-                    # the documentation or code Monsoon Inc. provides.
-                    logging.warning('Received unidentifiable packet with '
-                                    'SampleType %s: %s' % (sample_type,
-                                                           packet.get_bytes()))
-        return buffer_list
-
-    def _cut_new_buffer(self):
-        """Cuts a new buffer from the input stream data.
-
-        Returns:
-            The newly generated UncalibratedSampleChunk.
-        """
-        calibration_snapshot = CalibrationSnapshot(self.calibration_data)
-        new_chunk = UncalibratedSampleChunk(self._stored_raw_samples,
-                                            calibration_snapshot)
-        # Do not clear the list. Instead, create a new one so the old list can
-        # be owned solely by the UncalibratedSampleChunk.
-        self._stored_raw_samples = []
-        return new_chunk
-
-
-class HvpmReading(object):
-    """The result of fully calibrating a sample. Contains all Monsoon readings.
-
-    Attributes:
-        _reading_list: The list of values obtained from the Monsoon.
-        _time_of_reading: The time since sampling began that the reading was
-            collected at.
-    """
-
-    def __init__(self, reading_list, time_of_reading):
-        """
-        Args:
-            reading_list: A list of reading values in the order of:
-                [0] Main Current
-                [1] USB Current
-                [2] Aux Current
-                [3] Main Voltage
-                [4] USB Voltage
-            time_of_reading: The time the reading was received.
-        """
-        self._reading_list = reading_list
-        self._time_of_reading = time_of_reading
-
-    @property
-    def main_current(self):
-        return self._reading_list[0]
-
-    @property
-    def usb_current(self):
-        return self._reading_list[1]
-
-    @property
-    def aux_current(self):
-        return self._reading_list[2]
-
-    @property
-    def main_voltage(self):
-        return self._reading_list[3]
-
-    @property
-    def usb_voltage(self):
-        return self._reading_list[4]
-
-    @property
-    def sample_time(self):
-        return self._time_of_reading
-
-    def __add__(self, other):
-        return HvpmReading([
-            self.main_current + other.main_current,
-            self.usb_current + other.usb_current,
-            self.aux_current + other.aux_current,
-            self.main_voltage + other.main_voltage,
-            self.usb_voltage + other.usb_voltage,
-        ], self.sample_time + other.sample_time)
-
-    def __truediv__(self, other):
-        return HvpmReading([
-            self.main_current / other,
-            self.usb_current / other,
-            self.aux_current / other,
-            self.main_voltage / other,
-            self.usb_voltage / other,
-        ], self.sample_time / other)
-
-
-class CalibrationApplier(ParallelTransformer):
-    """Applies the calibration formula to the all given samples."""
-
-    def __init__(self, monsoon_status_packet):
-        super().__init__()
-        self.cal_constants = HvpmCalibrationConstants(monsoon_status_packet)
-        monsoon = HVPM.Monsoon()
-        self.fine_threshold = monsoon.fineThreshold
-        self._main_voltage_scale = monsoon.mainvoltageScale
-        self._usb_voltage_scale = monsoon.usbVoltageScale
-        # According to Monsoon.sampleEngine.__ADCRatio, each tick of the ADC
-        # represents this much voltage
-        self._adc_ratio = 6.25e-5
-
-    @staticmethod
-    def _is_device_calibrated(data):
-        """Checks to see if the Monsoon has completed calibration.
-
-        Args:
-            data: the calibration data.
-
-        Returns:
-            True if the data is calibrated. False otherwise.
-        """
-        try:
-            # If the data is calibrated for any Origin.REFERENCE value, it is
-            # calibrated for all Origin.REFERENCE values. The same is true for
-            # Origin.ZERO.
-            data.get(Channel.MAIN, Origin.REFERENCE, Granularity.COARSE)
-            data.get(Channel.MAIN, Origin.ZERO, Granularity.COARSE)
-        except CalibrationError:
-            return False
-        return True
-
-    def _transform_buffer(self, buffer):
-        """Transforms the buffer's information into HvpmReadings.
-
-        Args:
-            buffer: An UncalibratedSampleChunk. This buffer is in-place
-                transformed into a buffer of HvpmReadings.
-        """
-        calibration_data = buffer.calibration_data
-
-        if not self._is_device_calibrated(calibration_data):
-            buffer.samples.clear()
-            return buffer.samples
-
-        readings = np.zeros((len(buffer.samples), 5))
-
-        measurements = np.array([sample.values for sample in buffer.samples])
-        calibrated_value = np.zeros((len(buffer.samples), 2))
-
-        for channel in Channel.values:
-            for granularity in Granularity.values:
-                scale = self.cal_constants.get(channel, Origin.SCALE,
-                                               granularity)
-                zero_offset = self.cal_constants.get(channel, Origin.ZERO,
-                                                     granularity)
-                cal_ref = calibration_data.get(channel, Origin.REFERENCE,
-                                               granularity)
-                cal_zero = calibration_data.get(channel, Origin.ZERO,
-                                                granularity)
-                zero_offset += cal_zero
-                if cal_ref - zero_offset != 0:
-                    slope = scale / (cal_ref - zero_offset)
-                else:
-                    slope = 0
-                if granularity == Granularity.FINE:
-                    slope /= 1000
-
-                index = HvpmMeasurement.get_index(channel, granularity)
-                calibrated_value[:, granularity] = slope * (
-                    measurements[:, index] - zero_offset)
-
-            fine_data_position = HvpmMeasurement.get_index(
-                channel, Granularity.FINE)
-            readings[:, channel] = np.where(
-                measurements[:, fine_data_position] < self.fine_threshold,
-                calibrated_value[:, Granularity.FINE],
-                calibrated_value[:, Granularity.COARSE]) / 1000.0  # to mA
-
-        main_voltage_index = HvpmMeasurement.get_index(Channel.MAIN,
-                                                       Reading.VOLTAGE)
-        usb_voltage_index = HvpmMeasurement.get_index(Channel.USB,
-                                                      Reading.VOLTAGE)
-        readings[:, 3] = (measurements[:, main_voltage_index] * self._adc_ratio
-                          * self._main_voltage_scale)
-        readings[:, 4] = (measurements[:, usb_voltage_index] * self._adc_ratio
-                          * self._usb_voltage_scale)
-
-        for i in range(len(buffer.samples)):
-            buffer.samples[i] = HvpmReading(
-                list(readings[i]), buffer.samples[i].get_sample_time())
-
-        return buffer.samples
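# A scalar sketch of the math that CalibrationApplier vectorizes above. All
# names are placeholders, and the layout of `cal` is an assumption made only
# for this illustration: granularity -> (scale, zero_offset, cal_ref, cal_zero).
def calibrate_hvpm_current(raw_fine, raw_coarse, cal, fine_threshold):
    calibrated = {}
    for granularity in ('fine', 'coarse'):
        scale, zero_offset, cal_ref, cal_zero = cal[granularity]
        zero = zero_offset + cal_zero
        slope = scale / (cal_ref - zero) if cal_ref != zero else 0
        if granularity == 'fine':
            slope /= 1000
        raw = raw_fine if granularity == 'fine' else raw_coarse
        calibrated[granularity] = slope * (raw - zero)
    # Use the fine reading unless it is saturated, then fall back to coarse.
    chosen = calibrated['fine'] if raw_fine < fine_threshold else calibrated['coarse']
    return chosen / 1000.0  # convert to mA, as the transformer above does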
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py
deleted file mode 100644
index d9f5fdb..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Note: These calibration classes are based on the original reverse-engineered
-algorithm for handling calibration values. As a result, LvpmCalibrationConstants
-does not exist for the LVPM stock sampling algorithm."""
-
-import itertools
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import SampleType
-
-# The numerator used for FINE granularity calibration.
-_FINE_NUMERATOR = .0332
-
-# The numerator used for COARSE granularity calibration
-_COARSE_NUMERATOR = 2.88
-
-
-class LvpmCalibrationData(CalibrationWindows):
-    """An object that holds the Dynamic Calibration values for HVPM Sampling."""
-
-    def __init__(self, calibration_window_size=5):
-        super().__init__(calibration_window_size)
-
-        all_variable_sets = [
-            Channel.values,
-            (Origin.REFERENCE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            self._calibrations[key] = deque()
-
-    def add_calibration_sample(self, sample):
-        """Adds calibration values from a calibration sample.
-
-        LVPM Calibration Data is stored as:
-            [0]: Main Current calibration
-            [1]: USB Current calibration
-            [2]: Aux Current calibration
-            [3]: Main Voltage (unknown if this is actually calibration or a
-                               measurement!)
-
-        Note that coarse vs fine is determined by the position within the
-        packet. Even indexes are fine values, odd indexes are coarse values.
-        """
-        sample_type = sample.get_sample_type()
-        if sample_type == SampleType.ZERO_CAL:
-            origin = Origin.ZERO
-        elif sample_type == SampleType.REF_CAL:
-            origin = Origin.REFERENCE
-        else:
-            raise ValueError(
-                'Packet of type %s is not a calibration packet.' % sample_type)
-        granularity = sample.get_calibration_granularity()
-        for channel in Channel.values:
-            self.add(channel, origin, granularity, sample[channel])
-
-
-class LvpmCalibrationSnapshot(CalibrationSnapshot):
-    """A class that holds a snapshot of LVPM Calibration Data.
-
-    According to the original reverse-engineered algorithm for obtaining
-    samples, the LVPM determines scale from the reference and zero calibration
-    values. Here, we calculate those when taking a snapshot."""
-
-    def __init__(self, lvpm_calibration_base):
-        super().__init__(lvpm_calibration_base)
-        pairs = itertools.product(Channel.values, Granularity.values)
-
-        for channel, granularity in pairs:
-            if granularity == Granularity.COARSE:
-                numerator = _COARSE_NUMERATOR
-            else:
-                numerator = _FINE_NUMERATOR
-
-            divisor = (
-                self._calibrations[(channel, Origin.REFERENCE, granularity)] -
-                self._calibrations[(channel, Origin.ZERO, granularity)])
-            # Prevent division by zero.
-            if divisor == 0:
-                divisor = .0001
-
-            self._calibrations[(channel, Origin.SCALE,
-                                granularity)] = (numerator / divisor)
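# A minimal sketch of the scale derivation performed by LvpmCalibrationSnapshot
# above; the function name is illustrative.
def lvpm_scale(reference, zero, granularity):
    # Scale is derived from the reference and zero calibration readings using
    # the reverse-engineered numerators defined at the top of this module.
    numerator = 2.88 if granularity == 'coarse' else 0.0332
    divisor = reference - zero
    if divisor == 0:
        divisor = 0.0001  # same division-by-zero guard as the snapshot class
    return numerator / divisor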
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py
deleted file mode 100644
index 75d7af3..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import struct
-
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-
-
-class SampleType:
-    """An enum-like class that defines the SampleTypes for LVPM data.
-
-    Note that these values differ from the HVPM values.
-    """
-
-    # A measurement sample.
-    MEASUREMENT = 0x00
-
-    # A zero calibration sample.
-    ZERO_CAL = 0x01
-
-    # A reference calibration sample.
-    REF_CAL = 0x02
-
-    @staticmethod
-    def is_calibration(value):
-        """Returns true iff the SampleType is a type of calibration."""
-        return value == SampleType.ZERO_CAL or value == SampleType.REF_CAL
-
-
-class LvpmMeasurement(object):
-    """An object that tracks an individual measurement within the LvpmPacket.
-
-    Attributes:
-        _sample_time: The time the sample was taken.
-        _sample_type: The type of sample stored.
-        values: From reverse engineering, the values are as follows:
-
-
-    If the measurement is a calibration measurement:
-
-    Val │  Byte  │  Type  │ Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │   0    │  int16 │  Main   │ Current │ Calibration value.
-     1  │   2    │  int16 │  USB    │ Current │ Calibration value.
-     2  │   4    │  int16 │  Aux    │ Current │ Calibration value.
-     3  │   6    │ uint16 │  Main   │ Voltage │ Calibration value.
-
-    If the measurement is a power reading:
-
-    Val │  Byte  │  Type  │ Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │   0    │  int16 │  Main   │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     1  │   2    │  int16 │  USB    │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     2  │   4    │  int16 │  Aux    │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     3  │   6    │ uint16 │  Main   │ Voltage │ Measurement value.
-
-    """
-
-    # The total number of bytes in a measurement. See the table above.
-    SIZE = 8
-
-    def __init__(self, raw_data, sample_time, sample_type, entry_index):
-        """Creates a new LVPM Measurement.
-
-        Args:
-            raw_data: The raw data format of the LvpmMeasurement.
-            sample_time: The time the sample was recorded.
-            sample_type: The type of sample that was recorded.
-            entry_index: The index of the measurement within the packet.
-        """
-        self.values = struct.unpack('>3hH', raw_data)
-        self._sample_time = sample_time
-        self._sample_type = sample_type
-
-        if SampleType.is_calibration(self._sample_type):
-            # Calibration packets have granularity values determined by whether
-            # or not the entry was odd or even within the returned packet.
-            if entry_index % 2 == 0:
-                self._granularity = Granularity.FINE
-            else:
-                self._granularity = Granularity.COARSE
-        else:
-            # If it is not a calibration packet, each individual reading (main
-            # current, usb current, etc) determines granularity value by
-            # checking the LSB of the measurement value.
-            self._granularity = None
-
-    def __getitem__(self, channel_or_reading):
-        """Returns the requested reading for the given channel.
-
-        Args:
-            channel_or_reading: either a Channel or Reading.Voltage.
-        """
-        if channel_or_reading == Reading.VOLTAGE:
-            return self.values[3]
-        else:
-            # Must be a channel. If it is not, this line will throw an
-            # IndexError, which is what we will want for invalid values.
-            return self.values[channel_or_reading]
-
-    def get_sample_time(self):
-        """Returns the time (since the start time) this sample was collected."""
-        return self._sample_time
-
-    def get_sample_type(self):
-        """Returns a value contained in SampleType."""
-        return self._sample_type
-
-    def get_calibration_granularity(self):
-        """Returns the granularity associated with this packet.
-
-        If the packet is not a calibration packet, None is returned.
-        """
-        return self._granularity
-
-
-class Packet(object):
-    """A packet collected directly from serial.read() during sample collection.
-
-    Note that the true documentation for this has been lost to time. This class
-    and documentation uses knowledge that comes from several reverse-engineering
-    projects. Most of this knowledge comes from
-    http://wiki/Main/MonsoonProtocol.
-
-    The data table looks approximately like this:
-
-    Offset │ Format  │ Field   │ Description
-    ───────┼─────────┼─────────┼────────────────────────────────────────────
-       0   │  uint8  │  flags  │ Bits:
-           │         │    &    │  * b0-3: Sequence number (0-15). Increments
-           │         │   seq   │          each packet
-           │         │         │  * b4: 1 means over-current or thermal kill
-           │         │         │  * b5: Main Output, 1 == unit is at voltage,
-           │         │         │                     0 == output disabled.
-           │         │         │  * b6-7: reserved.
-       1   │  uint8  │ packet  │ The type of the packet:
-           │         │  type   │   * 0: A data packet
-           │         │         │   * 1: A zero calibration packet
-           │         │         │   * 2: A reference calibration packet
-       2   │  uint8  │ unknown │ Always seems to be 0x00
-       3   │  uint8  │ unknown │ Always seems to be 0x00 or 0xC4.
-       4   │ byte[8] │   data  │ See LvpmMeasurement.
-      ...  │ byte[8] │   data  │ Additional LvpmMeasurements.
-      -1   │  uint8  │ unknown │ Last byte, unknown values. Has been seen to
-           │         │         │ usually be \x00, or \x84.
-
-    Attributes:
-        _packet_data: The raw data received from the packet.
-        time_of_read: The unix timestamp this packet was collected at.
-        time_since_last_sample: The difference between this packet's
-            time_of_read and the previous packet's.
-    """
-
-    # The number of bytes before the first measurement.
-    FIRST_MEASUREMENT_OFFSET = 4
-
-    def __init__(self, sampled_bytes, time_of_read,
-                 time_since_last_sample):
-        self._packet_data = sampled_bytes
-        self.time_of_read = time_of_read
-        self.time_since_last_sample = time_since_last_sample
-
-        num_data_bytes = len(sampled_bytes) - Packet.FIRST_MEASUREMENT_OFFSET
-        num_packets = num_data_bytes // LvpmMeasurement.SIZE
-
-        sample_struct_format = (str(LvpmMeasurement.SIZE) + 's') * num_packets
-        struct_string = '>2B2x%sx' % sample_struct_format
-
-        self._flag_data, self.packet_type, *samples = struct.unpack(
-            struct_string, sampled_bytes)
-
-        self.measurements = [None] * len(samples)
-
-        for index, raw_measurement in enumerate(samples):
-            self.measurements[index] = LvpmMeasurement(
-                raw_measurement, self._get_sample_time(index),
-                self.packet_type, index)
-
-    def _get_sample_time(self, index):
-        """Returns the time the sample at the given index was received.
-
-        If multiple samples were captured within the same reading, the samples
-        are assumed to be uniformly distributed during the time it took to
-        sample the values.
-
-        Args:
-            index: the index of the individual reading from within the sample.
-        """
-        time_per_sample = self.time_since_last_sample / len(self.measurements)
-        return time_per_sample * (index + 1) + self.time_of_read
-
-    @property
-    def packet_counter(self):
-        return self._flag_data & 0x0F
-
-    def get_bytes(self, start, end_exclusive):
-        """Returns a bytearray spanning from start to the end (exclusive)."""
-        return self._packet_data[start:end_exclusive]
-
-    def __getitem__(self, index):
-        return self.measurements[index]
-
-    def __len__(self):
-        return len(self.measurements)
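# A standalone sketch of the struct layout the Packet class above describes:
# a 4-byte header ('>2B2x'), N 8-byte measurements, and one trailing unknown
# byte. The helper name is illustrative.
import struct

def split_lvpm_packet(raw):
    num_measurements = (len(raw) - 4 - 1) // 8
    fmt = '>2B2x' + '8s' * num_measurements + 'x'
    flags, packet_type, *measurements = struct.unpack(fmt, raw)
    return flags & 0x0F, packet_type, measurements  # low nibble is the sequence number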
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py
deleted file mode 100644
index 39658dd..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import struct
-import time
-
-import numpy as np
-
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon_proxy import MonsoonProxy
-from antlion.controllers.monsoon_lib.sampling.common import UncalibratedSampleChunk
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.calibrations import LvpmCalibrationData
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.calibrations import LvpmCalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import Packet
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import SampleType
-
-
-class StockLvpmSampler(Transformer):
-    """Gathers samples from the Monsoon and brings them back to the caller."""
-
-    def __init__(self, monsoon_serial, duration):
-        super().__init__()
-        self.monsoon_serial = monsoon_serial
-        self.duration = duration
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ProcessAssemblyLineBuilder()
-         .source(PacketCollector(self.monsoon_serial, self.duration))
-         .into(SampleNormalizer())
-         .build(output_stream=self.output_stream)
-         .run())
-        # yapf: enable
-
-
-class PacketCollector(SourceTransformer):
-    """Collects Monsoon packets into a buffer to be sent to another process."""
-
-    def __init__(self, serial=None, sampling_duration=None):
-        super().__init__()
-        self._monsoon_serial = serial
-        self._monsoon_proxy = None
-        self.start_time = 0
-        self.sampling_duration = sampling_duration
-
-    def _initialize_monsoon(self):
-        """Initializes the MonsoonProxy object."""
-        self._monsoon_proxy = MonsoonProxy(serialno=self._monsoon_serial)
-
-    def on_begin(self):
-        """Begins data collection."""
-        self.start_time = time.time()
-        self._initialize_monsoon()
-        self._monsoon_proxy.start_data_collection()
-
-    def on_end(self):
-        """Stops data collection."""
-        self._monsoon_proxy.stop_data_collection()
-        self._monsoon_proxy.ser.close()
-
-    def _transform_buffer(self, buffer):
-        """Fills the given buffer with raw monsoon data at each entry."""
-        if (self.sampling_duration
-                and self.sampling_duration < time.time() - self.start_time):
-            return None
-
-        for index in range(len(buffer)):
-            time_before_read = time.time()
-            data = self._read_packet()
-            if data is None:
-                continue
-            time_after_read = time.time()
-            time_data = struct.pack('dd', time_after_read,
-                                    time_after_read - time_before_read)
-            buffer[index] = time_data + data
-
-        return buffer
-
-    def _read_packet(self):
-        """Reads a single packet from the serial port.
-
-        Packets are sent as Length-Value-Checksum, where the first byte is the
-        length and the following bytes are the value and checksum. The checksum
-        is stored in the final byte and is calculated as the least-significant
-        byte of the sum of the length byte and all value bytes.
-
-        Returns:
-            None if the read failed. Otherwise, the packet data received.
-        """
-        len_char = self._monsoon_proxy.ser.read(1)
-        if not len_char:
-            logging.warning('Reading from serial timed out.')
-            return None
-
-        data_len = ord(len_char)
-        if not data_len:
-            logging.warning('Unable to read packet length.')
-            return None
-
-        result = self._monsoon_proxy.ser.read(int(data_len))
-        result = bytearray(result)
-        if len(result) != data_len:
-            logging.warning(
-                'Length mismatch, expected %d bytes, got %d bytes.', data_len,
-                len(result))
-            return None
-        body = result[:-1]
-        checksum = sum(body, data_len) & 0xFF
-        if result[-1] != checksum:
-            logging.warning(
-                'Invalid checksum from serial port! Expected %s, '
-                'got %s', hex(checksum), hex(result[-1]))
-            return None
-        return body
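# The Length-Value-Checksum framing described in the docstring above, checked
# in isolation. The helper and example bytes are illustrative only.
def verify_lvc_frame(length_byte, payload):
    # The checksum byte is the low byte of the sum of the length byte and
    # every value byte, matching the check in _read_packet above.
    body, checksum = payload[:-1], payload[-1]
    return checksum == (sum(body, length_byte) & 0xFF)

# Example: length 0x03 announces two value bytes plus the checksum byte.
assert verify_lvc_frame(0x03, bytes([0x10, 0x20, 0x33]))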
-
-
-class SampleNormalizer(Transformer):
-    """Normalizes the raw packet data into reading values."""
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ThreadAssemblyLineBuilder()
-         .source(PacketReader(), input_stream=input_stream)
-         .into(SampleChunker())
-         .into(CalibrationApplier())
-         .build(output_stream=self.output_stream)
-         .run())
-        # yapf: enable
-
-    def _transform_buffer(self, buffer):
-        """_transform is overloaded, so this function can be left empty."""
-
-
-class PacketReader(ParallelTransformer):
-    """Reads the raw packets and converts them into LVPM Packet objects."""
-
-    def _transform_buffer(self, buffer):
-        """Converts the raw packets to Packet objects in-place in buffer.
-
-        Args:
-            buffer: A list of bytes objects. Will be in-place replaced with
-                Packet objects.
-        """
-        for i, packet in enumerate(buffer):
-            time_bytes_size = struct.calcsize('dd')
-            # Unpacks the two time.time() values sent by PacketCollector.
-            time_of_read, time_since_last_read = struct.unpack(
-                'dd', packet[:time_bytes_size])
-            packet = packet[time_bytes_size:]
-            # Magic number explanation:
-            # LVPM sample packets begin with 4 bytes, have at least one
-            # measurement (8 bytes), and have 1 last byte (usually a \x00 byte).
-            if len(packet) < 4 + 8 + 1 or packet[0] & 0x20 != 0x20:
-                logging.warning(
-                    'Tried to collect power sample values, received data of '
-                    'type=0x%02x, len=%d instead.', packet[0], len(packet))
-                buffer[i] = None
-                continue
-
-            buffer[i] = Packet(packet, time_of_read, time_since_last_read)
-
-        return buffer
-
-
-class SampleChunker(SequentialTransformer):
-    """Chunks input packets into lists of samples with identical calibration.
-
-    This step helps to quickly apply calibration across many samples at once.
-
-    Attributes:
-        _stored_raw_samples: The queue of raw samples that have yet to be
-            split into a new calibration group.
-        calibration_data: The calibration window information.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._stored_raw_samples = []
-        self.calibration_data = LvpmCalibrationData()
-
-    def _on_end_of_stream(self, input_stream):
-        self._send_buffers(BufferList([self._cut_new_buffer()]))
-        super()._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Takes in data from the buffer and splits it based on calibration.
-
-        This transformer is meant to come after the PacketReader.
-
-        Args:
-            buffer: A list of Packet objects.
-
-        Returns:
-            A BufferList containing 0 or more UncalibratedSampleChunk objects.
-        """
-        buffer_list = BufferList()
-        for packet in buffer:
-            # If a read packet was not a sample, the PacketReader returns None.
-            # Skip over these dud values.
-            if packet is None:
-                continue
-
-            for sample in packet:
-                sample_type = sample.get_sample_type()
-
-                if sample_type == SampleType.MEASUREMENT:
-                    self._stored_raw_samples.append(sample)
-                elif SampleType.is_calibration(sample_type):
-                    if len(self._stored_raw_samples) > 0:
-                        buffer_list.append(self._cut_new_buffer())
-                    self.calibration_data.add_calibration_sample(sample)
-                else:
-                    # There's no information on what this packet means within
-                    # Monsoon documentation or code.
-                    logging.warning('Received unidentifiable packet with '
-                                    'SampleType %s: %s' %
-                                    (sample_type, packet.get_bytes(0, None)))
-        return buffer_list
-
-    def _cut_new_buffer(self):
-        """Cuts a new buffer from the input stream data.
-
-        Returns:
-            The newly generated UncalibratedSampleChunk.
-        """
-        calibration_snapshot = LvpmCalibrationSnapshot(self.calibration_data)
-        new_chunk = UncalibratedSampleChunk(self._stored_raw_samples,
-                                            calibration_snapshot)
-        self._stored_raw_samples = []
-        return new_chunk
-
-
-class LvpmReading(object):
-    """The result of fully calibrating a sample. Contains all Monsoon readings.
-
-    Attributes:
-        _reading_list: The list of values obtained from the Monsoon.
-        _time_of_reading: The time since sampling began that the reading was
-            collected at.
-    """
-
-    def __init__(self, reading_list, time_of_reading):
-        """Creates an LvpmReading.
-
-        Args:
-            reading_list:
-                [0] Main Current
-                [1] USB Current
-                [2] Aux Current
-                [3] Main Voltage
-            time_of_reading: The time the reading was received.
-        """
-        self._reading_list = reading_list
-        self._time_of_reading = time_of_reading
-
-    @property
-    def main_current(self):
-        return self._reading_list[0]
-
-    @property
-    def usb_current(self):
-        return self._reading_list[1]
-
-    @property
-    def aux_current(self):
-        return self._reading_list[2]
-
-    @property
-    def main_voltage(self):
-        return self._reading_list[3]
-
-    @property
-    def sample_time(self):
-        return self._time_of_reading
-
-    def __add__(self, other):
-        reading_list = [
-            self.main_current + other.main_current,
-            self.usb_current + other.usb_current,
-            self.aux_current + other.aux_current,
-            self.main_voltage + other.main_voltage,
-        ]
-        sample_time = self.sample_time + other.sample_time
-
-        return LvpmReading(reading_list, sample_time)
-
-    def __truediv__(self, other):
-        reading_list = [
-            self.main_current / other,
-            self.usb_current / other,
-            self.aux_current / other,
-            self.main_voltage / other,
-        ]
-        sample_time = self.sample_time / other
-
-        return LvpmReading(reading_list, sample_time)
-
-
-class CalibrationApplier(ParallelTransformer):
-    """Applies the calibration formula to the all given samples.
-
-    Designed to come after a SampleChunker Transformer.
-    """
-
-    @staticmethod
-    def _is_device_calibrated(data):
-        """Checks to see if the Monsoon has completed calibration.
-
-        Args:
-            data: the calibration data.
-
-        Returns:
-            True if the data is calibrated. False otherwise.
-        """
-        try:
-            # If the data is calibrated for any Origin.REFERENCE value, it is
-            # calibrated for all Origin.REFERENCE values. The same is true for
-            # Origin.ZERO.
-            data.get(Channel.MAIN, Origin.REFERENCE, Granularity.COARSE)
-            data.get(Channel.MAIN, Origin.ZERO, Granularity.COARSE)
-        except CalibrationError:
-            return False
-        return True
-
-    def _transform_buffer(self, buffer):
-        calibration_data = buffer.calibration_data
-
-        if not self._is_device_calibrated(calibration_data):
-            return []
-
-        measurements = np.array([sample.values for sample in buffer.samples])
-        readings = np.zeros((len(buffer.samples), 5))
-
-        for channel in Channel.values:
-            fine_zero = calibration_data.get(channel, Origin.ZERO,
-                                             Granularity.FINE)
-            fine_scale = calibration_data.get(channel, Origin.SCALE,
-                                              Granularity.FINE)
-            coarse_zero = calibration_data.get(channel, Origin.ZERO,
-                                               Granularity.COARSE)
-            coarse_scale = calibration_data.get(channel, Origin.SCALE,
-                                                Granularity.COARSE)
-
-            # A set LSB means a coarse measurement. This bit needs to be
-            # cleared before setting calibration. Note that the
-            # reverse-engineered algorithm does not rightshift the bits after
-            # this operation. This explains the mismatch of calibration
-            # constants between the reverse-engineered algorithm and the
-            # Monsoon.py algorithm.
-            readings[:, channel] = np.where(
-                measurements[:, channel] & 1,
-                ((measurements[:, channel] & ~1) - coarse_zero) * coarse_scale,
-                (measurements[:, channel] - fine_zero) * fine_scale)
-
-        # The magic number 0.000125 is documented at
-        # http://wiki/Main/MonsoonProtocol#Data_response
-        # It represents how many volts each tick in the sample packet
-        # represents.
-        readings[:, 3] = measurements[:, 3] * 0.000125
-
-        for i in range(len(buffer.samples)):
-            buffer.samples[i] = LvpmReading(
-                list(readings[i]), buffer.samples[i].get_sample_time())
-
-        return buffer.samples
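# A scalar sketch of the coarse/fine selection that the LVPM CalibrationApplier
# vectorizes above: a set least-significant bit marks a coarse measurement and
# is cleared (not right-shifted) before calibration. Names are placeholders.
def calibrate_lvpm_current(raw, fine_zero, fine_scale, coarse_zero, coarse_scale):
    if raw & 1:
        return ((raw & ~1) - coarse_zero) * coarse_scale
    return (raw - fine_zero) * fine_scale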
diff --git a/src/antlion/controllers/native.py b/src/antlion/controllers/native.py
deleted file mode 100644
index 8e11c87..0000000
--- a/src/antlion/controllers/native.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.sl4a_lib.rpc_connection import RpcConnection
-import json
-import os
-
-HOST = os.environ.get('AP_HOST', None)
-PORT = os.environ.get('AP_PORT', 9999)
-
-
-class SL4NException(Exception):
-    pass
-
-
-class SL4NAPIError(SL4NException):
-    """Raised when remote API reports an error."""
-
-
-class SL4NProtocolError(SL4NException):
-    """Raised when there is an error exchanging data with the device server."""
-    NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
-    NO_RESPONSE_FROM_SERVER = "No response from server."
-    MISMATCHED_API_ID = "Mismatched API id."
-
-
-def IDCounter():
-    i = 0
-    while True:
-        yield i
-        i += 1
-
-
-class NativeAndroid(RpcConnection):
-    COUNTER = IDCounter()
-
-    def _rpc(self, method, *args):
-        with self._lock:
-            apiid = next(self._counter)
-        data = {'id': apiid, 'method': method, 'params': args}
-        request = json.dumps(data)
-        self.client.write(request.encode("utf8") + b'\n')
-        self.client.flush()
-        response = self.client.readline()
-        if not response:
-            raise SL4NProtocolError(SL4NProtocolError.NO_RESPONSE_FROM_SERVER)
-        #TODO: (tturney) fix the C side from sending \x00 char over the socket.
-        result = json.loads(
-            str(response, encoding="utf8").rstrip().replace("\x00", ""))
-        if result['error']:
-            raise SL4NAPIError(result['error'])
-        if result['id'] != apiid:
-            raise SL4NProtocolError(SL4NProtocolError.MISMATCHED_API_ID)
-        return result['result']
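# For context, NativeAndroid._rpc above exchanges newline-delimited JSON. The
# method name, argument, and result in this sketch are placeholders, not real
# SL4N APIs.
import json

request = json.dumps({'id': 0, 'method': 'SomeFacadeMethod', 'params': ['arg']})
# sent over the socket as: request.encode('utf8') + b'\n'

response = '{"id": 0, "result": "ok", "error": null}\x00\n'
result = json.loads(response.rstrip().replace('\x00', ''))  # strip stray NUL bytes
assert result['error'] is None and result['id'] == 0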
diff --git a/src/antlion/controllers/native_android_device.py b/src/antlion/controllers/native_android_device.py
deleted file mode 100644
index b898bf6..0000000
--- a/src/antlion/controllers/native_android_device.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib import host_utils
-import antlion.controllers.native as native
-from subprocess import call
-
-import logging
-import time
-
-#TODO(tturney): Merge this into android device
-
-MOBLY_CONTROLLER_CONFIG_NAME = "NativeAndroidDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "native_android_devices"
-
-
-def create(configs):
-    logger = logging
-    ads = get_instances(configs)
-    for ad in ads:
-        try:
-            ad.get_droid()
-        except Exception:
-            logger.exception("Failed to start sl4n on %s" % ad.serial)
-    return ads
-
-
-def destroy(ads):
-    pass
-
-
-def get_instances(serials):
-    """Create NativeAndroidDevice instances from a list of serials.
-
-    Args:
-        serials: A list of android device serials.
-
-    Returns:
-        A list of NativeAndroidDevice objects.
-    """
-    results = []
-    for s in serials:
-        results.append(NativeAndroidDevice(s))
-    return results
-
-
-class NativeAndroidDeviceError(Exception):
-    pass
-
-
-class NativeAndroidDevice(AndroidDevice):
-    def __del__(self):
-        if self.h_port:
-            self.adb.forward("--remove tcp:%d" % self.h_port)
-
-    def get_droid(self, handle_event=True):
-        """Create an sl4n connection to the device.
-
-        Return the connection handler 'droid'. By default, another connection
-        on the same session is made for EventDispatcher, and the dispatcher is
-        returned to the caller as well.
-        If sl4n server is not started on the device, try to start it.
-
-        Args:
-            handle_event: True if this droid session will need to handle
-                events.
-
-        Returns:
-            droid: Android object used to communicate with sl4n on the android
-                device.
-            ed: An optional EventDispatcher to organize events for this droid.
-
-        Examples:
-            Don't need event handling:
-            >>> ad = NativeAndroidDevice()
-            >>> droid = ad.get_droid(False)
-
-            Need event handling:
-            >>> ad = NativeAndroidDevice()
-            >>> droid, ed = ad.get_droid()
-        """
-        if not self.h_port or not host_utils.is_port_available(self.h_port):
-            self.h_port = host_utils.get_available_host_port()
-        self.adb.tcp_forward(self.h_port, self.d_port)
-        pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
-        while (pid):
-            self.adb.shell("kill {}".format(pid))
-            pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
-        call(
-            ["adb -s " + self.serial + " shell sh -c \"/system/bin/sl4n\" &"],
-            shell=True)
-        try:
-            time.sleep(3)
-            droid = self.start_new_session()
-        except Exception:
-            droid = self.start_new_session()
-        return droid
-
-    def start_new_session(self):
-        """Start a new session in sl4n.
-
-        Also caches the droid in a dict with its uid being the key.
-
-        Returns:
-            An Android object used to communicate with sl4n on the android
-                device.
-
-        Raises:
-            SL4NException: Something is wrong with sl4n and it returned an
-                existing uid to a new session.
-        """
-        droid = native.NativeAndroid(port=self.h_port)
-        droid.open()
-        if droid.uid in self._droid_sessions:
-            raise native.SL4NException("SL4N returned an existing uid for a "
-                                       "new session. Abort.")
-        self._droid_sessions[droid.uid] = [droid]
-        return droid
diff --git a/src/antlion/controllers/openwrt_ap.py b/src/antlion/controllers/openwrt_ap.py
index ad6e3de..dc99ef2 100644
--- a/src/antlion/controllers/openwrt_ap.py
+++ b/src/antlion/controllers/openwrt_ap.py
@@ -10,14 +10,15 @@
 from antlion.controllers.openwrt_lib import network_settings
 from antlion.controllers.openwrt_lib import wireless_config
 from antlion.controllers.openwrt_lib import wireless_settings_applier
-from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtModelMap as modelmap
+from antlion.controllers.openwrt_lib.openwrt_constants import (
+    OpenWrtModelMap as modelmap,
+)
 from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
 from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import settings
 import yaml
 
-
 MOBLY_CONTROLLER_CONFIG_NAME = "OpenWrtAP"
 ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
 OPEN_SECURITY = "none"
@@ -37,616 +38,682 @@
 
 
 def create(configs):
-  """Creates ap controllers from a json config.
+    """Creates ap controllers from a json config.
 
-  Creates an ap controller from either a list, or a single element. The element
-  can either be just the hostname or a dictionary containing the hostname and
-  username of the AP to connect to over SSH.
+    Creates an ap controller from either a list or a single element. The element
+    can either be just the hostname or a dictionary containing the hostname and
+    username of the AP to connect to over SSH.
 
-  Args:
-    configs: The json configs that represent this controller.
+    Args:
+      configs: The json configs that represent this controller.
 
-  Returns:
-    AccessPoint object
+    Returns:
+      A list of OpenWrtAP objects.
 
-  Example:
-    Below is the config file entry for OpenWrtAP as a list. A testbed can have
-    1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
-    login information. OpenWrtAP#__init__() uses this to create SSH object.
+    Example:
+      Below is the config file entry for OpenWrtAP as a list. A testbed can have
+      1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
+      login information. OpenWrtAP#__init__() uses this to create SSH object.
 
-      "OpenWrtAP": [
-        {
-          "ssh_config": {
-            "user" : "root",
-            "host" : "192.168.1.1"
+        "OpenWrtAP": [
+          {
+            "ssh_config": {
+              "user" : "root",
+              "host" : "192.168.1.1"
+            }
+          },
+          {
+            "ssh_config": {
+              "user" : "root",
+              "host" : "192.168.1.2"
+            }
           }
-        },
-        {
-          "ssh_config": {
-            "user" : "root",
-            "host" : "192.168.1.2"
-          }
-        }
-      ]
-  """
-  return [OpenWrtAP(c) for c in configs]
+        ]
+    """
+    return [OpenWrtAP(c) for c in configs]
 
 
 def destroy(aps):
-  """Destroys a list of AccessPoints.
+    """Destroys a list of AccessPoints.
 
-  Args:
-    aps: The list of AccessPoints to destroy.
-  """
-  for ap in aps:
-    ap.close()
-    ap.close_ssh()
+    Args:
+      aps: The list of AccessPoints to destroy.
+    """
+    for ap in aps:
+        ap.close()
+        ap.close_ssh()
 
 
 def get_info(aps):
-  """Get information on a list of access points.
+    """Get information on a list of access points.
 
-  Args:
-    aps: A list of AccessPoints.
+    Args:
+      aps: A list of AccessPoints.
 
-  Returns:
-    A list of all aps hostname.
-  """
-  return [ap.ssh_settings.hostname for ap in aps]
+    Returns:
+      A list of each AP's hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in aps]
 
 
 class OpenWrtAP(object):
-  """An AccessPoint controller.
+    """An AccessPoint controller.
 
-  Attributes:
-    ssh: The ssh connection to the AP.
-    ssh_settings: The ssh settings being used by the ssh connection.
-    log: Logging object for AccessPoint.
-    wireless_setting: object holding wireless configuration.
-    network_setting: Object for network configuration.
-    model: OpenWrt HW model.
-    radios: Fit interface for test.
-  """
-
-  def __init__(self, config):
-    """Initialize AP."""
-    self.ssh_settings = settings.from_config(config["ssh_config"])
-    self.ssh = connection.SshConnection(self.ssh_settings)
-    self.log = logger.create_logger(
-        lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg))
-    self.wireless_setting = None
-    self.network_setting = network_settings.NetworkSettings(
-        self.ssh, self.ssh_settings, self.log)
-    self.model = self.get_model_name()
-    if self.model in modelmap.__dict__:
-      self.radios = modelmap.__dict__[self.model]
-    else:
-      self.radios = DEFAULT_RADIOS
-
-  def configure_ap(self, wifi_configs, channel_2g, channel_5g):
-    """Configure AP with the required settings.
-
-    Each test class inherits WifiBaseTest. Based on the test, we may need to
-    configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
-    combination. We call WifiBaseTest methods get_psk_network(),
-    get_open_network(), get_wep_network() and get_ent_network() to create
-    dictionaries which contains this information. 'wifi_configs' is a list of
-    such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
-    1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
-    configure the APs.
-
-    wifi_configs = [
-      {
-        '2g': {
-          'SSID': '2g_AkqXWPK4',
-          'security': 'psk2',
-          'password': 'YgYuXqDO9H',
-          'hiddenSSID': False
-        },
-      },
-      {
-        '5g': {
-          'SSID': '5g_8IcMR1Sg',
-          'security': 'none',
-          'hiddenSSID': False
-        },
-      }
-    ]
-
-    Args:
-      wifi_configs: list of network settings for 2G and 5G bands.
-      channel_2g: channel for 2G band.
-      channel_5g: channel for 5G band.
-    """
-    # generate wifi configs to configure
-    wireless_configs = self.generate_wireless_configs(wifi_configs)
-    self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
-        self.ssh, wireless_configs, channel_2g, channel_5g, self.radios[1], self.radios[0])
-    self.wireless_setting.apply_wireless_settings()
-
-  def start_ap(self):
-    """Starts the AP with the settings in /etc/config/wireless."""
-    self.ssh.run("wifi up")
-    curr_time = time.time()
-    while time.time() < curr_time + WAIT_TIME:
-      if self.get_wifi_status():
-        return
-      time.sleep(3)
-    if not self.get_wifi_status():
-      raise ValueError("Failed to turn on WiFi on the AP.")
-
-  def stop_ap(self):
-    """Stops the AP."""
-    self.ssh.run("wifi down")
-    curr_time = time.time()
-    while time.time() < curr_time + WAIT_TIME:
-      if not self.get_wifi_status():
-        return
-      time.sleep(3)
-    if self.get_wifi_status():
-      raise ValueError("Failed to turn off WiFi on the AP.")
-
-  def get_bssids_for_wifi_networks(self):
-    """Get BSSIDs for wifi networks configured.
-
-    Returns:
-      Dictionary of SSID - BSSID map for both bands.
-    """
-    bssid_map = {"2g": {}, "5g": {}}
-    for radio in self.radios:
-      ssid_ifname_map = self.get_ifnames_for_ssids(radio)
-      if radio == self.radios[0]:
-        for ssid, ifname in ssid_ifname_map.items():
-          bssid_map["5g"][ssid] = self.get_bssid(ifname)
-      elif radio == self.radios[1]:
-        for ssid, ifname in ssid_ifname_map.items():
-          bssid_map["2g"][ssid] = self.get_bssid(ifname)
-    return bssid_map
-
-  def get_ifnames_for_ssids(self, radio):
-    """Get interfaces for wifi networks.
-
-    Args:
-      radio: 2g or 5g radio get the bssids from.
-
-    Returns:
-      dictionary of ssid - ifname mappings.
-    """
-    ssid_ifname_map = {}
-    str_output = self.ssh.run("wifi status %s" % radio).stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-    wifi_status = wifi_status[radio]
-    if wifi_status["up"]:
-      interfaces = wifi_status["interfaces"]
-      for config in interfaces:
-        ssid = config["config"]["ssid"]
-        ifname = config["ifname"]
-        ssid_ifname_map[ssid] = ifname
-    return ssid_ifname_map
-
-  def get_bssid(self, ifname):
-    """Get MAC address from an interface.
-
-    Args:
-      ifname: interface name of the corresponding MAC.
-
-    Returns:
-      BSSID of the interface.
-    """
-    ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout
-    mac_addr = ifconfig.split("\n")[0].split()[-1]
-    return mac_addr
-
-  def set_wpa_encryption(self, encryption):
-    """Set different encryptions to wpa or wpa2.
-
-    Args:
-      encryption: ccmp, tkip, or ccmp+tkip.
-    """
-    str_output = self.ssh.run("wifi status").stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-
-    # Counting how many interface are enabled.
-    total_interface = 0
-    for radio in self.radios:
-      num_interface = len(wifi_status[radio]["interfaces"])
-      total_interface += num_interface
-
-    # Iterates every interface to get and set wpa encryption.
-    default_extra_interface = 2
-    for i in range(total_interface + default_extra_interface):
-      origin_encryption = self.ssh.run(
-          "uci get wireless.@wifi-iface[{}].encryption".format(i)).stdout
-      origin_psk_pattern = re.match(r"psk\b", origin_encryption)
-      target_psk_pattern = re.match(r"psk\b", encryption)
-      origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
-      target_psk2_pattern = re.match(r"psk2\b", encryption)
-
-      if origin_psk_pattern == target_psk_pattern:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                i, encryption))
-
-      if origin_psk2_pattern == target_psk2_pattern:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                i, encryption))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def set_password(self, pwd_5g=None, pwd_2g=None):
-    """Set password for individual interface.
-
-    Args:
-        pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
-        pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
-    """
-    if pwd_5g:
-      if len(pwd_5g) < 8 or len(pwd_5g) > 63:
-        self.log.error("Password must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
-        self.log.error("Password must only contains ascii letters and digits")
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g))
-        self.log.info("Set 5G password to :{}".format(pwd_5g))
-
-    if pwd_2g:
-      if len(pwd_2g) < 8 or len(pwd_2g) > 63:
-        self.log.error("Password must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
-        self.log.error("Password must only contains ascii letters and digits")
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g))
-        self.log.info("Set 2G password to :{}".format(pwd_2g))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def set_ssid(self, ssid_5g=None, ssid_2g=None):
-    """Set SSID for individual interface.
-
-    Args:
-        ssid_5g: 8 ~ 63 chars for 5g network.
-        ssid_2g: 8 ~ 63 chars for 2g network.
-    """
-    if ssid_5g:
-      if len(ssid_5g) < 8 or len(ssid_5g) > 63:
-        self.log.error("SSID must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g))
-        self.log.info("Set 5G SSID to :{}".format(ssid_5g))
-
-    if ssid_2g:
-      if len(ssid_2g) < 8 or len(ssid_2g) > 63:
-        self.log.error("SSID must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g))
-        self.log.info("Set 2G SSID to :{}".format(ssid_2g))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def generate_mobility_domain(self):
-    """Generate 4-character hexadecimal ID.
-
-    Returns:
-      String; a 4-character hexadecimal ID.
-    """
-    md = "{:04x}".format(random.getrandbits(16))
-    self.log.info("Mobility Domain ID: {}".format(md))
-    return md
-
-  def enable_80211r(self, iface, md):
-    """Enable 802.11r for one single radio.
-
-    Args:
-      iface: index number of wifi-iface.
-              2: radio1
-              3: radio0
-      md: mobility domain. a 4-character hexadecimal ID.
-    Raises:
-      TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
-    """
-    str_output = self.ssh.run("wifi status").stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-    # Check if the radio is up.
-    if iface == OpenWrtWifiSetting.IFACE_2G:
-      if wifi_status[self.radios[1]]["up"]:
-        self.log.info("2g network is ENABLED")
-      else:
-        raise signals.TestSkip("2g network is NOT ENABLED")
-    elif iface == OpenWrtWifiSetting.IFACE_5G:
-      if wifi_status[self.radios[0]]["up"]:
-        self.log.info("5g network is ENABLED")
-      else:
-        raise signals.TestSkip("5g network is NOT ENABLED")
-
-    # Setup 802.11r.
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface))
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'"
-        .format(iface))
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].mobility_domain='{}'"
-        .format(iface, md))
-    self.ssh.run(
-        "uci commit wireless")
-    self.ssh.run("wifi")
-
-    # Check if 802.11r is enabled.
-    result = self.ssh.run(
-        "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface)).stdout
-    if result == "1":
-      self.log.info("802.11r is ENABLED")
-    else:
-      raise signals.TestSkip("802.11r is NOT ENABLED")
-
-  def generate_wireless_configs(self, wifi_configs):
-    """Generate wireless configs to configure.
-
-    Converts wifi_configs from configure_ap() to a list of 'WirelessConfig'
-    objects. Each object represents a wifi network to configure on the AP.
-
-    Args:
-      wifi_configs: Network list of different security types and bands.
-
-    Returns:
-      wireless configuration for openwrt AP.
-    """
-    num_2g = 1
-    num_5g = 1
-    wireless_configs = []
-
-    for i in range(len(wifi_configs)):
-      if hostapd_constants.BAND_2G in wifi_configs[i]:
-        config = wifi_configs[i][hostapd_constants.BAND_2G]
-        if config["security"] == PSK_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == PSK1_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == WEP_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             wep_key=config["wepKeys"][0],
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OPEN_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OWE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAEMIXED_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == ENT_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig(
-                  "%s%s" % (WIFI_2G, num_2g),
-                  config["SSID"],
-                  config["security"],
-                  hostapd_constants.BAND_2G,
-                  radius_server_ip=config["radius_server_ip"],
-                  radius_server_port=config["radius_server_port"],
-                  radius_server_secret=config["radius_server_secret"],
-                  hidden=config["hiddenSSID"]))
-        num_2g += 1
-      if hostapd_constants.BAND_5G in wifi_configs[i]:
-        config = wifi_configs[i][hostapd_constants.BAND_5G]
-        if config["security"] == PSK_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == PSK1_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == WEP_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             wep_key=config["wepKeys"][0],
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OPEN_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OWE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAEMIXED_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == ENT_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig(
-                  "%s%s" % (WIFI_5G, num_5g),
-                  config["SSID"],
-                  config["security"],
-                  hostapd_constants.BAND_5G,
-                  radius_server_ip=config["radius_server_ip"],
-                  radius_server_port=config["radius_server_port"],
-                  radius_server_secret=config["radius_server_secret"],
-                  hidden=config["hiddenSSID"]))
-        num_5g += 1
-
-    return wireless_configs
-
-  def get_wifi_network(self, security=None, band=None):
-    """Return first match wifi interface's config.
-
-    Args:
-      security: psk2 or none
-      band: '2g' or '5g'
-
-    Returns:
-      A dict contains match wifi interface's config.
+    Attributes:
+      ssh: The ssh connection to the AP.
+      ssh_settings: The ssh settings being used by the ssh connection.
+      log: Logging object for AccessPoint.
+      wireless_setting: object holding wireless configuration.
+      network_setting: Object for network configuration.
+      model: OpenWrt HW model.
+      radios: Radio interfaces to use for the test.
     """
 
-    for wifi_iface in self.wireless_setting.wireless_configs:
-      match_list = []
-      wifi_network = wifi_iface.__dict__
-      if security:
-        match_list.append(security == wifi_network["security"])
-      if band:
-        match_list.append(band == wifi_network["band"])
+    def __init__(self, config):
+        """Initialize AP."""
+        self.ssh_settings = settings.from_config(config["ssh_config"])
+        self.ssh = connection.SshConnection(self.ssh_settings)
+        self.log = logger.create_logger(
+            lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg)
+        )
+        self.wireless_setting = None
+        self.network_setting = network_settings.NetworkSettings(
+            self.ssh, self.ssh_settings, self.log
+        )
+        self.model = self.get_model_name()
+        if self.model in modelmap.__dict__:
+            self.radios = modelmap.__dict__[self.model]
+        else:
+            self.radios = DEFAULT_RADIOS
 
-      if all(match_list):
-        wifi_network["SSID"] = wifi_network["ssid"]
-        if not wifi_network["password"]:
-          del wifi_network["password"]
-        return wifi_network
-    return None
+    def configure_ap(self, wifi_configs, channel_2g, channel_5g):
+        """Configure AP with the required settings.
 
-  def get_wifi_status(self):
-    """Check if radios are up. Default are 2G and 5G bands.
+        Each test class inherits WifiBaseTest. Based on the test, we may need to
+        configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
+        combination. We call WifiBaseTest methods get_psk_network(),
+        get_open_network(), get_wep_network() and get_ent_network() to create
+        dictionaries which contain this information. 'wifi_configs' is a list of
+        such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
+        1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
+        configure the APs.
 
-    Returns:
-      True if both radios are up. False if not.
-    """
-    status = True
-    for radio in self.radios:
-      try:
+        wifi_configs = [
+          {
+            '2g': {
+              'SSID': '2g_AkqXWPK4',
+              'security': 'psk2',
+              'password': 'YgYuXqDO9H',
+              'hiddenSSID': False
+            },
+          },
+          {
+            '5g': {
+              'SSID': '5g_8IcMR1Sg',
+              'security': 'none',
+              'hiddenSSID': False
+            },
+          }
+        ]
+
+        Args:
+          wifi_configs: list of network settings for 2G and 5G bands.
+          channel_2g: channel for 2G band.
+          channel_5g: channel for 5G band.
+        """
+        # generate wifi configs to configure
+        wireless_configs = self.generate_wireless_configs(wifi_configs)
+        self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
+            self.ssh,
+            wireless_configs,
+            channel_2g,
+            channel_5g,
+            self.radios[1],
+            self.radios[0],
+        )
+        self.wireless_setting.apply_wireless_settings()
+
+    def start_ap(self):
+        """Starts the AP with the settings in /etc/config/wireless."""
+        self.ssh.run("wifi up")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if self.get_wifi_status():
+                return
+            time.sleep(3)
+        if not self.get_wifi_status():
+            raise ValueError("Failed to turn on WiFi on the AP.")
+
+    def stop_ap(self):
+        """Stops the AP."""
+        self.ssh.run("wifi down")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if not self.get_wifi_status():
+                return
+            time.sleep(3)
+        if self.get_wifi_status():
+            raise ValueError("Failed to turn off WiFi on the AP.")
+
+    def get_bssids_for_wifi_networks(self):
+        """Get BSSIDs for wifi networks configured.
+
+        Returns:
+          Dictionary of SSID - BSSID map for both bands.
+        """
+        bssid_map = {"2g": {}, "5g": {}}
+        for radio in self.radios:
+            ssid_ifname_map = self.get_ifnames_for_ssids(radio)
+            if radio == self.radios[0]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["5g"][ssid] = self.get_bssid(ifname)
+            elif radio == self.radios[1]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["2g"][ssid] = self.get_bssid(ifname)
+        return bssid_map
+
+    def get_ifnames_for_ssids(self, radio):
+        """Get interfaces for wifi networks.
+
+        Args:
+          radio: The 2g or 5g radio to get the BSSIDs from.
+
+        Returns:
+          dictionary of ssid - ifname mappings.
+        """
+        ssid_ifname_map = {}
         str_output = self.ssh.run("wifi status %s" % radio).stdout
-        wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                                Loader=yaml.SafeLoader)
-        status = wifi_status[radio]["up"] and status
-      except:
-        self.log.info("Failed to make ssh connection to the OpenWrt")
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        wifi_status = wifi_status[radio]
+        if wifi_status["up"]:
+            interfaces = wifi_status["interfaces"]
+            for config in interfaces:
+                ssid = config["config"]["ssid"]
+                ifname = config["ifname"]
+                ssid_ifname_map[ssid] = ifname
+        return ssid_ifname_map
+
+    def get_bssid(self, ifname):
+        """Get MAC address from an interface.
+
+        Args:
+          ifname: interface name of the corresponding MAC.
+
+        Returns:
+          BSSID of the interface.
+        """
+        ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout
+        mac_addr = ifconfig.split("\n")[0].split()[-1]
+        return mac_addr
+
+    def set_wpa_encryption(self, encryption):
+        """Set different encryptions to wpa or wpa2.
+
+        Args:
+          encryption: ccmp, tkip, or ccmp+tkip.
+        """
+        str_output = self.ssh.run("wifi status").stdout
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+
+        # Count how many interfaces are enabled.
+        total_interface = 0
+        for radio in self.radios:
+            num_interface = len(wifi_status[radio]["interfaces"])
+            total_interface += num_interface
+
+        # Iterates every interface to get and set wpa encryption.
+        default_extra_interface = 2
+        for i in range(total_interface + default_extra_interface):
+            origin_encryption = self.ssh.run(
+                "uci get wireless.@wifi-iface[{}].encryption".format(i)
+            ).stdout
+            origin_psk_pattern = re.match(r"psk\b", origin_encryption)
+            target_psk_pattern = re.match(r"psk\b", encryption)
+            origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
+            target_psk2_pattern = re.match(r"psk2\b", encryption)
+
+            if origin_psk_pattern == target_psk_pattern:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].encryption={}".format(
+                        i, encryption
+                    )
+                )
+
+            if origin_psk2_pattern == target_psk2_pattern:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].encryption={}".format(
+                        i, encryption
+                    )
+                )
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_password(self, pwd_5g=None, pwd_2g=None):
+        """Set password for individual interface.
+
+        Args:
+            pwd_5g: Password for the 5g network; 8~63 ASCII letters and digits.
+            pwd_2g: Password for the 2g network; 8~63 ASCII letters and digits.
+        """
+        if pwd_5g:
+            if len(pwd_5g) < 8 or len(pwd_5g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g)
+                )
+                self.log.info("Set 5G password to :{}".format(pwd_5g))
+
+        if pwd_2g:
+            if len(pwd_2g) < 8 or len(pwd_2g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g)
+                )
+                self.log.info("Set 2G password to :{}".format(pwd_2g))
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_ssid(self, ssid_5g=None, ssid_2g=None):
+        """Set SSID for individual interface.
+
+        Args:
+            ssid_5g: SSID for the 5g network; 8~63 characters.
+            ssid_2g: SSID for the 2g network; 8~63 characters.
+        """
+        if ssid_5g:
+            if len(ssid_5g) < 8 or len(ssid_5g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g)
+                )
+                self.log.info("Set 5G SSID to :{}".format(ssid_5g))
+
+        if ssid_2g:
+            if len(ssid_2g) < 8 or len(ssid_2g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(
+                    "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g)
+                )
+                self.log.info("Set 2G SSID to :{}".format(ssid_2g))
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def generate_mobility_domain(self):
+        """Generate 4-character hexadecimal ID.
+
+        Returns:
+          String; a 4-character hexadecimal ID.
+        """
+        md = "{:04x}".format(random.getrandbits(16))
+        self.log.info("Mobility Domain ID: {}".format(md))
+        return md
+
+    def enable_80211r(self, iface, md):
+        """Enable 802.11r for one single radio.
+
+        Args:
+          iface: index number of wifi-iface.
+                  2: radio1
+                  3: radio0
+          md: mobility domain. a 4-character hexadecimal ID.
+        Raises:
+          TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
+        """
+        str_output = self.ssh.run("wifi status").stdout
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        # Check if the radio is up.
+        if iface == OpenWrtWifiSetting.IFACE_2G:
+            if wifi_status[self.radios[1]]["up"]:
+                self.log.info("2g network is ENABLED")
+            else:
+                raise signals.TestSkip("2g network is NOT ENABLED")
+        elif iface == OpenWrtWifiSetting.IFACE_5G:
+            if wifi_status[self.radios[0]]["up"]:
+                self.log.info("5g network is ENABLED")
+            else:
+                raise signals.TestSkip("5g network is NOT ENABLED")
+
+        # Setup 802.11r.
+        self.ssh.run("uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface))
+        self.ssh.run(
+            "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'".format(iface)
+        )
+        self.ssh.run(
+            "uci set wireless.@wifi-iface[{}].mobility_domain='{}'".format(iface, md)
+        )
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+        # Check if 802.11r is enabled.
+        result = self.ssh.run(
+            "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface)
+        ).stdout
+        if result == "1":
+            self.log.info("802.11r is ENABLED")
+        else:
+            raise signals.TestSkip("802.11r is NOT ENABLED")
+
+    def generate_wireless_configs(self, wifi_configs):
+        """Generate wireless configs to configure.
+
+        Converts wifi_configs from configure_ap() to a list of 'WirelessConfig'
+        objects. Each object represents a wifi network to configure on the AP.
+
+        Args:
+          wifi_configs: List of network configs of different security types and bands.
+
+        Returns:
+          A list of wireless configurations for the OpenWrt AP.
+        """
+        num_2g = 1
+        num_5g = 1
+        wireless_configs = []
+
+        for i in range(len(wifi_configs)):
+            if hostapd_constants.BAND_2G in wifi_configs[i]:
+                config = wifi_configs[i][hostapd_constants.BAND_2G]
+                if config["security"] == PSK_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == PSK1_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == WEP_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            wep_key=config["wepKeys"][0],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OPEN_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OWE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAEMIXED_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == ENT_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_2G, num_2g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_2G,
+                            radius_server_ip=config["radius_server_ip"],
+                            radius_server_port=config["radius_server_port"],
+                            radius_server_secret=config["radius_server_secret"],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                num_2g += 1
+            if hostapd_constants.BAND_5G in wifi_configs[i]:
+                config = wifi_configs[i][hostapd_constants.BAND_5G]
+                if config["security"] == PSK_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == PSK1_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == WEP_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            wep_key=config["wepKeys"][0],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OPEN_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                elif config["security"] == OWE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAE_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+                elif config["security"] == SAEMIXED_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            password=config["password"],
+                            hidden=config["hiddenSSID"],
+                            ieee80211w=config["ieee80211w"],
+                        )
+                    )
+                elif config["security"] == ENT_SECURITY:
+                    wireless_configs.append(
+                        wireless_config.WirelessConfig(
+                            "%s%s" % (WIFI_5G, num_5g),
+                            config["SSID"],
+                            config["security"],
+                            hostapd_constants.BAND_5G,
+                            radius_server_ip=config["radius_server_ip"],
+                            radius_server_port=config["radius_server_port"],
+                            radius_server_secret=config["radius_server_secret"],
+                            hidden=config["hiddenSSID"],
+                        )
+                    )
+                num_5g += 1
+
+        return wireless_configs
+
+    def get_wifi_network(self, security=None, band=None):
+        """Return first match wifi interface's config.
+
+        Args:
+          security: psk2 or none
+          band: '2g' or '5g'
+
+        Returns:
+          A dict containing the matching wifi interface's config.
+        """
+
+        for wifi_iface in self.wireless_setting.wireless_configs:
+            match_list = []
+            wifi_network = wifi_iface.__dict__
+            if security:
+                match_list.append(security == wifi_network["security"])
+            if band:
+                match_list.append(band == wifi_network["band"])
+
+            if all(match_list):
+                wifi_network["SSID"] = wifi_network["ssid"]
+                if not wifi_network["password"]:
+                    del wifi_network["password"]
+                return wifi_network
+        return None
+
+    def get_wifi_status(self):
+        """Check if radios are up. Default are 2G and 5G bands.
+
+        Returns:
+          True if both radios are up. False if not.
+        """
+        status = True
+        for radio in self.radios:
+            try:
+                str_output = self.ssh.run("wifi status %s" % radio).stdout
+                wifi_status = yaml.load(
+                    str_output.replace("\t", "").replace("\n", ""),
+                    Loader=yaml.SafeLoader,
+                )
+                status = wifi_status[radio]["up"] and status
+            except:
+                self.log.info("Failed to make ssh connection to the OpenWrt")
+                return False
+        return status
+
+    def verify_wifi_status(self, timeout=20):
+        """Ensure wifi interfaces are ready.
+
+        Args:
+          timeout: Seconds to wait for the wifi interfaces to become ready.
+        Returns:
+          True if both radios are up. False if not.
+        """
+        start_time = time.time()
+        end_time = start_time + timeout
+        while time.time() < end_time:
+            if self.get_wifi_status():
+                return True
+            time.sleep(1)
         return False
-    return status
 
-  def verify_wifi_status(self, timeout=20):
-    """Ensure wifi interfaces are ready.
+    def get_model_name(self):
+        """Get Openwrt model name.
 
-    Args:
-      timeout: An integer that is the number of times to try
-               wait for interface ready.
-    Returns:
-      True if both radios are up. False if not.
-    """
-    start_time = time.time()
-    end_time = start_time + timeout
-    while time.time() < end_time:
-      if self.get_wifi_status():
-        return True
-      time.sleep(1)
-    return False
+        Returns:
+          A string including the device brand and model, e.g. NETGEAR_R8000.
+        """
+        out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
+        for line in out:
+            if "board_name" in line:
+                model = line.split()[1].strip('",').split(",")
+                return "_".join(map(lambda i: i.upper(), model))
+        self.log.info("Failed to retrieve OpenWrt model information.")
+        return None
 
-  def get_model_name(self):
-    """Get Openwrt model name.
+    def close(self):
+        """Reset wireless and network settings to default and stop AP."""
+        if self.network_setting.config:
+            self.network_setting.cleanup_network_settings()
+        if self.wireless_setting:
+            self.wireless_setting.cleanup_wireless_settings()
 
-    Returns:
-      A string include device brand and model. e.g. NETGEAR_R8000
-    """
-    out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
-    for line in out:
-      if "board_name" in line:
-        model = (line.split()[1].strip("\",").split(","))
-        return "_".join(map(lambda i: i.upper(), model))
-    self.log.info("Failed to retrieve OpenWrt model information.")
-    return None
+    def close_ssh(self):
+        """Close SSH connection to AP."""
+        self.ssh.close()
 
-  def close(self):
-    """Reset wireless and network settings to default and stop AP."""
-    if self.network_setting.config:
-      self.network_setting.cleanup_network_settings()
-    if self.wireless_setting:
-      self.wireless_setting.cleanup_wireless_settings()
-
-  def close_ssh(self):
-    """Close SSH connection to AP."""
-    self.ssh.close()
-
-  def reboot(self):
-    """Reboot Openwrt."""
-    self.ssh.run("reboot")
-
+    def reboot(self):
+        """Reboot Openwrt."""
+        self.ssh.run("reboot")
diff --git a/src/antlion/controllers/openwrt_lib/network_const.py b/src/antlion/controllers/openwrt_lib/network_const.py
index 3aba0de..3b05b83 100644
--- a/src/antlion/controllers/openwrt_lib/network_const.py
+++ b/src/antlion/controllers/openwrt_lib/network_const.py
@@ -4,13 +4,10 @@
 IPSEC_CONF = {
     "config setup": {
         "charondebug": "chd 2,ike 2,knl 2,net 2,esp 2,dmn 2,"
-                       "mgr 2,lib 1,cfg 2,enc 1".__repr__(),
-        "uniqueids": "never"
+        "mgr 2,lib 1,cfg 2,enc 1".__repr__(),
+        "uniqueids": "never",
     },
-    "conn %default": {
-        "ike": "aes128-sha-modp1024",
-        "esp": "aes128-sha1"
-    }
+    "conn %default": {"ike": "aes128-sha-modp1024", "esp": "aes128-sha1"},
 }
 
 IPSEC_L2TP_PSK = {
@@ -24,7 +21,7 @@
         "rightprotoport": "17/%any",
         "rightsubnet": "0.0.0.0/0",
         "rightauth": "psk",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -40,7 +37,7 @@
         "rightprotoport": "17/%any",
         "rightsubnet": "0.0.0.0/0",
         "rightauth": "pubkey",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -102,7 +99,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "eap-mschapv2",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -116,7 +113,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "psk",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -133,7 +130,7 @@
         "rightid": "vpntest@%s" % LOCALHOST,
         "rightauth": "pubkey",
         "rightcert": "clientCert.pem",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -149,7 +146,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "eap-mschapv2",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -163,7 +160,7 @@
         "right": "%any",
         "rightid": "vpntest",
         "rightauth": "psk",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -180,7 +177,7 @@
         "rightid": "vpntest@strongswan-vpn-server.android-iperf.com",
         "rightauth": "pubkey",
         "rightcert": "clientCert.pem",
-        "auto": "add"
+        "auto": "add",
     }
 }
 
@@ -228,14 +225,14 @@
     "proxyarp",
     "lcp-echo-interval 30",
     "lcp-echo-failure 4",
-    "nomppe"
+    "nomppe",
 )
 
 # iptable rules for vpn_pptp
 FIREWALL_RULES_FOR_PPTP = (
     "iptables -A input_rule -i ppp+ -j ACCEPT",
     "iptables -A output_rule -o ppp+ -j ACCEPT",
-    "iptables -A forwarding_rule -i ppp+ -j ACCEPT"
+    "iptables -A forwarding_rule -i ppp+ -j ACCEPT",
 )
 
 # iptable rules for vpn_l2tp
@@ -251,7 +248,7 @@
     "iptables -A INPUT -i eth0.2 -p udp --dport 4500 -j ACCEPT",
     "iptables -A INPUT -p udp --dport 500 -j ACCEPT",
     "iptables -A INPUT -p udp --dport 4500 -j ACCEPT",
-    "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT"
+    "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT",
 )
 
 FIREWALL_RULES_DISABLE_DNS_RESPONSE = (
@@ -275,13 +272,15 @@
         name: vpn server name for register in OpenWrt
     """
 
-    def __init__(self,
-                 vpn_server_hostname,
-                 vpn_server_address,
-                 vpn_username,
-                 vpn_password,
-                 psk_secret,
-                 server_name):
+    def __init__(
+        self,
+        vpn_server_hostname,
+        vpn_server_address,
+        vpn_username,
+        vpn_password,
+        psk_secret,
+        server_name,
+    ):
         self.name = server_name
         self.hostname = vpn_server_hostname
         self.address = vpn_server_address
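
network_const.VpnL2tp, reformatted above, is a plain holder for the six L2TP/IPsec parameters; setup_vpn_l2tp_server() in network_settings.py (next file) constructs it with the same arguments. A minimal construction sketch follows, with every value a placeholder; only the keyword names and the stored attributes come from this diff.

from antlion.controllers.openwrt_lib import network_const

# Placeholder values; the constructor signature matches the reformatted __init__ above.
l2tp = network_const.VpnL2tp(
    vpn_server_hostname="vpn.example.com",
    vpn_server_address="10.10.10.1",
    vpn_username="vpntest",
    vpn_password="password1",
    psk_secret="presharedkey",
    server_name="l2tp-server",
)

# The constructor stores the values as simple attributes (name, hostname, address, ...).
assert l2tp.hostname == "vpn.example.com"
assert l2tp.address == "10.10.10.1"
assert l2tp.name == "l2tp-server"
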
diff --git a/src/antlion/controllers/openwrt_lib/network_settings.py b/src/antlion/controllers/openwrt_lib/network_settings.py
index 10ed891..5d14360 100644
--- a/src/antlion/controllers/openwrt_lib/network_settings.py
+++ b/src/antlion/controllers/openwrt_lib/network_settings.py
@@ -19,7 +19,6 @@
 from antlion import utils
 from antlion.controllers.openwrt_lib import network_const
 
-
 SERVICE_DNSMASQ = "dnsmasq"
 SERVICE_STUNNEL = "stunnel"
 SERVICE_NETWORK = "network"
@@ -92,7 +91,7 @@
             "block_dns_response": self.unblock_dns_response,
             "setup_mdns": self.remove_mdns,
             "add_dhcp_rapid_commit": self.remove_dhcp_rapid_commit,
-            "setup_captive_portal": self.remove_cpative_portal
+            "setup_captive_portal": self.remove_cpative_portal,
         }
         # This map contains cleanup functions to restore the configuration to
         # its default state. We write these keys to HISTORY_CONFIG_PATH prior to
@@ -130,8 +129,7 @@
         """Apply changes on Access point."""
         self.ssh.run("uci commit")
         self.service_manager.restart_services()
-        self.create_config_file("\n".join(self.config),
-                                HISTORY_CONFIG_PATH)
+        self.create_config_file("\n".join(self.config), HISTORY_CONFIG_PATH)
 
     def package_install(self, package_list):
         """Install packages on OpenWrtAP via opkg If not installed.
@@ -143,8 +141,10 @@
         self.ssh.run("opkg update")
         for package_name in package_list.split(" "):
             if not self._package_installed(package_name):
-                self.ssh.run("opkg install %s" % package_name,
-                             timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT)
+                self.ssh.run(
+                    "opkg install %s" % package_name,
+                    timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT,
+                )
                 self.log.info("Package: %s installed." % package_name)
             else:
                 self.log.info("Package: %s skipped (already installed)." % package_name)
@@ -185,8 +185,9 @@
             True if Existed.
         """
         path, file_name = abs_file_path.rsplit("/", 1)
-        if self.ssh.run("ls %s | grep %s" % (path, file_name),
-                        ignore_status=True).stdout:
+        if self.ssh.run(
+            "ls %s | grep %s" % (path, file_name), ignore_status=True
+        ).stdout:
             return True
         return False
 
@@ -211,7 +212,7 @@
         if not self.path_exists(abs_path):
             self.ssh.run("mkdir %s" % abs_path)
         else:
-            self.log.info("%s already existed." %abs_path)
+            self.log.info("%s already existed." % abs_path)
 
     def count(self, config, key):
         """Count in uci config.
@@ -222,8 +223,9 @@
         Returns:
             Numbers of the count.
         """
-        count = self.ssh.run("uci show %s | grep =%s" % (config, key),
-                             ignore_status=True).stdout
+        count = self.ssh.run(
+            "uci show %s | grep =%s" % (config, key), ignore_status=True
+        ).stdout
         return len(count.split("\n"))
 
     def create_config_file(self, config, file_path):
@@ -233,7 +235,7 @@
             config: A string of content of config.
             file_path: Config's abs_path.
         """
-        self.ssh.run("echo -e \"%s\" > %s" % (config, file_path))
+        self.ssh.run('echo -e "%s" > %s' % (config, file_path))
 
     def replace_config_option(self, old_option, new_option, file_path):
         """Replace config option if pattern match.
@@ -324,8 +326,7 @@
 
     def clear_resource_record(self):
         """Delete the all resource record."""
-        rr = self.ssh.run("uci show dhcp | grep =domain",
-                          ignore_status=True).stdout
+        rr = self.ssh.run("uci show dhcp | grep =domain", ignore_status=True).stdout
         if rr:
             for _ in rr.split("\n"):
                 self.del_resource_record()
@@ -408,27 +409,28 @@
         self.ssh.run("uci set pptpd.@login[0].password='%s'" % password)
         self.service_manager.need_restart(SERVICE_PPTPD)
 
-        self.replace_config_option(r"#*ms-dns \d+.\d+.\d+.\d+",
-                                   "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH)
-        self.replace_config_option("(#no)*proxyarp",
-                                   "proxyarp", PPTPD_OPTION_PATH)
+        self.replace_config_option(
+            r"#*ms-dns \d+.\d+.\d+.\d+", "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH
+        )
+        self.replace_config_option("(#no)*proxyarp", "proxyarp", PPTPD_OPTION_PATH)
 
     def restore_pptpd(self):
         """Disable pptpd."""
         self.ssh.run("uci set pptpd.pptpd.enabled=0")
-        self.remove_config_option(r"\S+ pptp-server \S+ \*",
-                                  PPP_CHAP_SECRET_PATH)
+        self.remove_config_option(r"\S+ pptp-server \S+ \*", PPP_CHAP_SECRET_PATH)
         self.service_manager.need_restart(SERVICE_PPTPD)
 
-    def setup_vpn_l2tp_server(self,
-                              vpn_server_hostname,
-                              vpn_server_address,
-                              vpn_username,
-                              vpn_password,
-                              psk_secret,
-                              server_name,
-                              country,
-                              org):
+    def setup_vpn_l2tp_server(
+        self,
+        vpn_server_hostname,
+        vpn_server_address,
+        vpn_username,
+        vpn_password,
+        psk_secret,
+        server_name,
+        country,
+        org,
+    ):
         """Setup l2tp vpn server on OpenWrt.
 
         Args:
@@ -441,12 +443,14 @@
             country: country code for generate cert keys.
             org: Organization name for generate cert keys.
         """
-        self.l2tp = network_const.VpnL2tp(vpn_server_hostname,
-                                          vpn_server_address,
-                                          vpn_username,
-                                          vpn_password,
-                                          psk_secret,
-                                          server_name)
+        self.l2tp = network_const.VpnL2tp(
+            vpn_server_hostname,
+            vpn_server_address,
+            vpn_username,
+            vpn_password,
+            psk_secret,
+            server_name,
+        )
 
         self.package_install(L2TP_PACKAGE)
         self.config.add("setup_vpn_l2tp_server")
@@ -499,19 +503,23 @@
             "       include strongswan.d/charon/*.conf",
             "   }",
             "   dns1=%s" % dns,
-            "}"
+            "}",
         ]
         self.create_config_file("\n".join(config), "/etc/strongswan.conf")
 
     def setup_ipsec(self):
         """Setup ipsec config."""
+
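+        # load_ipsec_config flattens the nested section dicts from network_const
+        # into indented "key=value" lines; when rightsourceip is set, connecting
+        # clients are assigned addresses from the .16/26 block of the L2TP
+        # server's /24 network.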
         def load_ipsec_config(data, rightsourceip=False):
             for i in data.keys():
                 config.append(i)
                 for j in data[i].keys():
                     config.append("\t %s=%s" % (j, data[i][j]))
                 if rightsourceip:
-                    config.append("\t rightsourceip=%s.16/26" % self.l2tp.address.rsplit(".", 1)[0])
+                    config.append(
+                        "\t rightsourceip=%s.16/26"
+                        % self.l2tp.address.rsplit(".", 1)[0]
+                    )
                 config.append("")
 
         config = []
@@ -532,8 +540,9 @@
         ipsec_secret = []
         ipsec_secret.append(r": PSK \"%s\"" % self.l2tp.psk_secret)
         ipsec_secret.append(r": RSA \"%s\"" % "serverKey.der")
-        ipsec_secret.append(r"%s : XAUTH \"%s\"" % (self.l2tp.username,
-                                                    self.l2tp.password))
+        ipsec_secret.append(
+            r"%s : XAUTH \"%s\"" % (self.l2tp.username, self.l2tp.password)
+        )
         self.create_config_file("\n".join(ipsec_secret), "/etc/ipsec.secrets")
 
     def setup_xl2tpd(self, ip_range=20):
@@ -542,9 +551,10 @@
         xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL)
         xl2tpd_conf.append("auth file = %s" % PPP_CHAP_SECRET_PATH)
         xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS)
-        xl2tpd_conf.append("ip range = %s.%s-%s.%s" %
-                           (net_id, host_id, net_id,
-                            str(int(host_id)+ip_range)))
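+        # Hand out `ip_range` consecutive client addresses in the same /24 as
+        # the L2TP server, from net_id.host_id to net_id.(host_id + ip_range).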
+        xl2tpd_conf.append(
+            "ip range = %s.%s-%s.%s"
+            % (net_id, host_id, net_id, str(int(host_id) + ip_range))
+        )
         xl2tpd_conf.append("local ip = %s" % self.l2tp.address)
         xl2tpd_conf.append("name = %s" % self.l2tp.name)
         xl2tpd_conf.append("pppoptfile = %s" % XL2TPD_OPTION_CONFIG_PATH)
@@ -552,16 +562,14 @@
         self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH)
         xl2tpd_option = list(network_const.XL2TPD_OPTION)
         xl2tpd_option.append("name %s" % self.l2tp.name)
-        self.create_config_file("\n".join(xl2tpd_option),
-                                XL2TPD_OPTION_CONFIG_PATH)
+        self.create_config_file("\n".join(xl2tpd_option), XL2TPD_OPTION_CONFIG_PATH)
 
     def setup_ppp_secret(self):
         self.replace_config_option(
             r"\S+ %s \S+ \*" % self.l2tp.name,
-            "%s %s %s *" % (self.l2tp.username,
-                            self.l2tp.name,
-                            self.l2tp.password),
-            PPP_CHAP_SECRET_PATH)
+            "%s %s %s *" % (self.l2tp.username, self.l2tp.name, self.l2tp.password),
+            PPP_CHAP_SECRET_PATH,
+        )
 
     def generate_vpn_cert_keys(self, country, org):
         """Generate cert and keys for vpn server."""
@@ -569,25 +577,32 @@
         lifetime = "--lifetime 365"
         size = "--size 4096"
 
-        self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" %
-                     (rsa, size))
-        self.ssh.run("ipsec pki --self --ca %s --in caKey.der %s --dn "
-                     "\"C=%s, O=%s, CN=%s\" --outform der > caCert.der" %
-                     (lifetime, rsa, country, org, self.l2tp.hostname))
-        self.ssh.run("ipsec pki --gen %s %s --outform der > serverKey.der" %
-                     (size, rsa))
-        self.ssh.run("ipsec pki --pub --in serverKey.der %s | ipsec pki "
-                     "--issue %s --cacert caCert.der --cakey caKey.der "
-                     "--dn \"C=%s, O=%s, CN=%s\" --san %s --flag serverAuth"
-                     " --flag ikeIntermediate --outform der > serverCert.der" %
-                     (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST))
-        self.ssh.run("ipsec pki --gen %s %s --outform der > clientKey.der" %
-                     (size, rsa))
-        self.ssh.run("ipsec pki --pub --in clientKey.der %s | ipsec pki "
-                     "--issue %s --cacert caCert.der --cakey caKey.der "
-                     "--dn \"C=%s, O=%s, CN=%s@%s\" --outform der > "
-                     "clientCert.der" % (rsa, lifetime, country, org,
-                                         self.l2tp.username, self.l2tp.hostname))
+        self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size))
+        self.ssh.run(
+            "ipsec pki --self --ca %s --in caKey.der %s --dn "
+            '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
+            % (lifetime, rsa, country, org, self.l2tp.hostname)
+        )
+        self.ssh.run(
+            "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa)
+        )
+        self.ssh.run(
+            "ipsec pki --pub --in serverKey.der %s | ipsec pki "
+            "--issue %s --cacert caCert.der --cakey caKey.der "
+            '--dn "C=%s, O=%s, CN=%s" --san %s --flag serverAuth'
+            " --flag ikeIntermediate --outform der > serverCert.der"
+            % (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST)
+        )
+        self.ssh.run(
+            "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa)
+        )
+        self.ssh.run(
+            "ipsec pki --pub --in clientKey.der %s | ipsec pki "
+            "--issue %s --cacert caCert.der --cakey caKey.der "
+            '--dn "C=%s, O=%s, CN=%s@%s" --outform der > '
+            "clientCert.der"
+            % (rsa, lifetime, country, org, self.l2tp.username, self.l2tp.hostname)
+        )
 
         self.ssh.run(
             "openssl rsa -inform DER -in clientKey.der"
@@ -598,8 +613,7 @@
             " -out clientCert.pem -outform PEM"
         )
         self.ssh.run(
-            "openssl x509 -inform DER -in caCert.der"
-            " -out caCert.pem -outform PEM"
+            "openssl x509 -inform DER -in caCert.der" " -out caCert.pem -outform PEM"
         )
         self.ssh.run(
             "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem"
@@ -625,29 +639,45 @@
         ikev2_vpn_cert_keys = [
             "ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size),
             "ipsec pki --self --ca %s --in caKey.der %s --dn "
-            "\"C=%s, O=%s, CN=%s\" --outform der > caCert.der" %
-            (lifetime, rsa, country, org, self.l2tp.hostname),
+            '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
+            % (lifetime, rsa, country, org, self.l2tp.hostname),
             "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa),
             "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s "
             r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" "
             "--san %s --san %s --flag serverAuth --flag ikeIntermediate "
-            "--outform der > serverCert.der" % (rsa, lifetime, country, org,
-                                                self.l2tp.hostname, LOCALHOST,
-                                                self.l2tp.hostname),
+            "--outform der > serverCert.der"
+            % (
+                rsa,
+                lifetime,
+                country,
+                org,
+                self.l2tp.hostname,
+                LOCALHOST,
+                self.l2tp.hostname,
+            ),
             "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa),
             "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s "
             r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" "
             r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der "
-            "> clientCert.der" % (rsa, lifetime, country, org, self.l2tp.username,
-                                  self.l2tp.hostname, self.l2tp.username,
-                                  self.l2tp.username, LOCALHOST,
-                                  self.l2tp.username, self.l2tp.hostname),
+            "> clientCert.der"
+            % (
+                rsa,
+                lifetime,
+                country,
+                org,
+                self.l2tp.username,
+                self.l2tp.hostname,
+                self.l2tp.username,
+                self.l2tp.username,
+                LOCALHOST,
+                self.l2tp.username,
+                self.l2tp.hostname,
+            ),
             "openssl rsa -inform DER -in clientKey.der "
             "-out clientKey.pem -outform PEM",
             "openssl x509 -inform DER -in clientCert.der "
             "-out clientCert.pem -outform PEM",
-            "openssl x509 -inform DER -in caCert.der "
-            "-out caCert.pem -outform PEM",
+            "openssl x509 -inform DER -in caCert.der " "-out caCert.pem -outform PEM",
             "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem "
             "-certfile caCert.pem -export -out clientPkcs.p12 -passout pass:",
             "mv caCert.pem /etc/ipsec.d/cacerts/",
@@ -697,12 +727,14 @@
         """Restore firewall for vpn pptp server."""
         self.update_firewall_rules_list()
         if "pptpd" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("pptpd"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("pptpd")
+            )
         self.update_firewall_rules_list()
         if "GRP" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("GRP"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("GRP")
+            )
         self.remove_custom_firewall_rules()
         self.service_manager.need_restart(SERVICE_FIREWALL)
 
@@ -733,10 +765,11 @@
 
         net_id = self.l2tp.address.rsplit(".", 1)[0]
         iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP)
-        iptable_rules.append("iptables -A FORWARD -s %s.0/24"
-                             "  -j ACCEPT" % net_id)
-        iptable_rules.append("iptables -t nat -A POSTROUTING"
-                             " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id)
+        iptable_rules.append("iptables -A FORWARD -s %s.0/24" "  -j ACCEPT" % net_id)
+        iptable_rules.append(
+            "iptables -t nat -A POSTROUTING"
+            " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id
+        )
 
         self.add_custom_firewall_rules(iptable_rules)
         self.service_manager.need_restart(SERVICE_FIREWALL)
@@ -745,16 +778,22 @@
         """Restore firewall for vpn l2tp server."""
         self.update_firewall_rules_list()
         if "ipsec esp" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("ipsec esp"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("ipsec esp")
+            )
         self.update_firewall_rules_list()
         if "ipsec nat-t" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("ipsec nat-t"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("ipsec nat-t")
+            )
         self.update_firewall_rules_list()
         if "auth header" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("auth header"))
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("auth header")
+            )
         self.remove_custom_firewall_rules()
         self.service_manager.need_restart(SERVICE_FIREWALL)
 
@@ -764,22 +803,20 @@
         Args:
             rules: A list of iptable rules to apply.
         """
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH+".backup"
+        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup"
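+        # Move the original custom firewall file aside on first use so that
+        # remove_custom_firewall_rules() can restore it later.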
         if not self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH,
-                                       backup_file_path))
+            self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH, backup_file_path))
         for rule in rules:
             self.ssh.run("echo %s >> %s" % (rule, FIREWALL_CUSTOM_OPTION_PATH))
 
     def remove_custom_firewall_rules(self):
         """Clean up and recover custom firewall rules."""
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH+".backup"
+        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup"
         if self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (backup_file_path,
-                                       FIREWALL_CUSTOM_OPTION_PATH))
+            self.ssh.run("mv %s %s" % (backup_file_path, FIREWALL_CUSTOM_OPTION_PATH))
         else:
             self.log.debug("Did not find %s" % backup_file_path)
-            self.ssh.run("echo "" > %s" % FIREWALL_CUSTOM_OPTION_PATH)
+            self.ssh.run("echo " " > %s" % FIREWALL_CUSTOM_OPTION_PATH)
 
     def disable_pptp_service(self):
         """Disable pptp service."""
@@ -791,7 +828,7 @@
         self.ssh.run("uci set network.lan2.type=bridge")
         self.ssh.run("uci set network.lan2.ifname=eth1.2")
         self.ssh.run("uci set network.lan2.proto=static")
-        self.ssh.run("uci set network.lan2.ipaddr=\"%s\"" % self.l2tp.address)
+        self.ssh.run('uci set network.lan2.ipaddr="%s"' % self.l2tp.address)
         self.ssh.run("uci set network.lan2.netmask=255.255.255.0")
         self.ssh.run("uci set network.lan2=interface")
         self.service_manager.reload(SERVICE_NETWORK)
@@ -855,10 +892,10 @@
             self.commit_changes()
 
     def _add_dhcp_option(self, args):
-        self.ssh.run("uci add_list dhcp.lan.dhcp_option=\"%s\"" % args)
+        self.ssh.run('uci add_list dhcp.lan.dhcp_option="%s"' % args)
 
     def _remove_dhcp_option(self, args):
-        self.ssh.run("uci del_list dhcp.lan.dhcp_option=\"%s\"" % args)
+        self.ssh.run('uci del_list dhcp.lan.dhcp_option="%s"' % args)
 
     def add_default_dns(self, addr_list):
         """Add default dns server for client.
@@ -888,7 +925,7 @@
         Args:
             addr_list: dns ip address for Openwrt client.
         """
-        self.ssh.run("uci add_list dhcp.lan.dns=\"%s\"" % addr_list)
+        self.ssh.run('uci add_list dhcp.lan.dns="%s"' % addr_list)
         self.config.add("default_v6_dns %s" % addr_list)
         self.service_manager.need_restart(SERVICE_ODHCPD)
         self.commit_changes()
@@ -899,7 +936,7 @@
         Args:
             addr_list: dns ip address for Openwrt client.
         """
-        self.ssh.run("uci del_list dhcp.lan.dns=\"%s\"" % addr_list)
+        self.ssh.run('uci del_list dhcp.lan.dns="%s"' % addr_list)
         self.config.add("default_v6_dns %s" % addr_list)
         self.service_manager.need_restart(SERVICE_ODHCPD)
         self.commit_changes()
@@ -917,19 +954,19 @@
         self.commit_changes()
 
     def add_dhcp_rapid_commit(self):
-        self.create_config_file("dhcp-rapid-commit\n","/etc/dnsmasq.conf")
+        self.create_config_file("dhcp-rapid-commit\n", "/etc/dnsmasq.conf")
         self.config.add("add_dhcp_rapid_commit")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
 
     def remove_dhcp_rapid_commit(self):
-        self.create_config_file("","/etc/dnsmasq.conf")
+        self.create_config_file("", "/etc/dnsmasq.conf")
         self.config.discard("add_dhcp_rapid_commit")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
 
     def start_tcpdump(self, test_name, args="", interface="br-lan"):
-        """"Start tcpdump on OpenWrt.
+        """ "Start tcpdump on OpenWrt.
 
         Args:
             test_name: Test name for create tcpdump file name.
@@ -942,8 +979,10 @@
         self.package_install("tcpdump")
         if not self.path_exists(TCPDUMP_DIR):
             self.ssh.run("mkdir %s" % TCPDUMP_DIR)
-        tcpdump_file_name = "openwrt_%s_%s.pcap" % (test_name,
-                                                    time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())))
+        tcpdump_file_name = "openwrt_%s_%s.pcap" % (
+            test_name,
+            time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),
+        )
         tcpdump_file_path = "".join([TCPDUMP_DIR, tcpdump_file_name])
         cmd = "tcpdump -i %s -s0 %s -w %s" % (interface, args, tcpdump_file_path)
         self.ssh.run_async(cmd)
@@ -988,7 +1027,9 @@
 
     def _get_tcpdump_pid(self, tcpdump_file_name):
         """Check tcpdump process on OpenWrt."""
-        return self.ssh.run("pgrep -f %s" % (tcpdump_file_name), ignore_status=True).stdout
+        return self.ssh.run(
+            "pgrep -f %s" % (tcpdump_file_name), ignore_status=True
+        ).stdout
 
     def setup_mdns(self):
         self.config.add("setup_mdns")
@@ -1013,7 +1054,7 @@
         self.service_manager.need_restart(SERVICE_FIREWALL)
         self.commit_changes()
 
-    def setup_captive_portal(self, fas_fdqn,fas_port=2080):
+    def setup_captive_portal(self, fas_fdqn, fas_port=2080):
         """Create captive portal with Forwarding Authentication Service.
 
         Args:
@@ -1026,7 +1067,7 @@
         self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050")
         self.ssh.run("uci set opennds.@opennds[0].fasport=%s" % fas_port)
         self.ssh.run("uci set opennds.@opennds[0].fasremotefqdn=%s" % fas_fdqn)
-        self.ssh.run("uci set opennds.@opennds[0].faspath=\"/nds/fas-aes.php\"")
+        self.ssh.run('uci set opennds.@opennds[0].faspath="/nds/fas-aes.php"')
         self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890")
         self.service_manager.need_restart(SERVICE_OPENNDS)
         # Config uhttpd
@@ -1055,8 +1096,8 @@
         self.clear_resource_record()
         # Restore uhttpd
         self.ssh.run("uci del uhttpd.main.interpreter")
-        self.ssh.run("uci del_list uhttpd.main.listen_http=\'0.0.0.0:%s\'" % fas_port)
-        self.ssh.run("uci del_list uhttpd.main.listen_http=\'[::]:%s\'" % fas_port)
+        self.ssh.run("uci del_list uhttpd.main.listen_http='0.0.0.0:%s'" % fas_port)
+        self.ssh.run("uci del_list uhttpd.main.listen_http='[::]:%s'" % fas_port)
         self.service_manager.need_restart(SERVICE_UHTTPD)
         # Clean web root
         self.ssh.run("rm -r /www/nds")
@@ -1067,9 +1108,9 @@
 class ServiceManager(object):
     """Class for service on OpenWrt.
 
-        Attributes:
-        ssh: ssh object for the AP.
-        _need_restart: Record service need to restart.
+    Attributes:
+        ssh: ssh object for the AP.
+        _need_restart: Records which services need to be restarted.
     """
 
     def __init__(self, ssh):
diff --git a/src/antlion/controllers/openwrt_lib/openwrt_constants.py b/src/antlion/controllers/openwrt_lib/openwrt_constants.py
index 8fd7686..3b591d5 100644
--- a/src/antlion/controllers/openwrt_lib/openwrt_constants.py
+++ b/src/antlion/controllers/openwrt_lib/openwrt_constants.py
@@ -18,21 +18,21 @@
 
 
 class OpenWrtWifiSecurity:
-  # Used by OpenWrt AP
-  WPA_PSK_DEFAULT = "psk"
-  WPA_PSK_CCMP = "psk+ccmp"
-  WPA_PSK_TKIP = "psk+tkip"
-  WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
-  WPA2_PSK_DEFAULT = "psk2"
-  WPA2_PSK_CCMP = "psk2+ccmp"
-  WPA2_PSK_TKIP = "psk2+tkip"
-  WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"
+    # Used by OpenWrt AP
+    WPA_PSK_DEFAULT = "psk"
+    WPA_PSK_CCMP = "psk+ccmp"
+    WPA_PSK_TKIP = "psk+tkip"
+    WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
+    WPA2_PSK_DEFAULT = "psk2"
+    WPA2_PSK_CCMP = "psk2+ccmp"
+    WPA2_PSK_TKIP = "psk2+tkip"
+    WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"
 
 
 class OpenWrtWifiSetting:
-  IFACE_2G = 2
-  IFACE_5G = 3
+    IFACE_2G = 2
+    IFACE_5G = 3
 
 
 class OpenWrtModelMap:
-  NETGEAR_R8000 = ("radio2", "radio1")
+    NETGEAR_R8000 = ("radio2", "radio1")
diff --git a/src/antlion/controllers/openwrt_lib/wireless_config.py b/src/antlion/controllers/openwrt_lib/wireless_config.py
index 7810fa2..9cdb309 100644
--- a/src/antlion/controllers/openwrt_lib/wireless_config.py
+++ b/src/antlion/controllers/openwrt_lib/wireless_config.py
@@ -4,50 +4,50 @@
 
 
 class WirelessConfig(object):
-  """Creates an object to hold wireless config.
+    """Creates an object to hold wireless config.
 
-  Attributes:
-    name: name of the wireless config
-    ssid: SSID of the network.
-    security: security of the wifi network.
-    band: band of the wifi network.
-    iface: network interface of the wifi network.
-    password: password for psk network.
-    wep_key: wep keys for wep network.
-    wep_key_num: key number for wep network.
-    radius_server_ip: IP address of radius server.
-    radius_server_port: Port number of radius server.
-    radius_server_secret: Secret key of radius server.
-    hidden: Boolean, if the wifi network is hidden.
-    ieee80211w: PMF bit of the wifi network.
-  """
+    Attributes:
+      name: name of the wireless config
+      ssid: SSID of the network.
+      security: security of the wifi network.
+      band: band of the wifi network.
+      iface: network interface of the wifi network.
+      password: password for psk network.
+      wep_key: wep keys for wep network.
+      wep_key_num: key number for wep network.
+      radius_server_ip: IP address of radius server.
+      radius_server_port: Port number of radius server.
+      radius_server_secret: Secret key of radius server.
+      hidden: Boolean, if the wifi network is hidden.
+      ieee80211w: PMF bit of the wifi network.
+    """
 
-  def __init__(
-      self,
-      name,
-      ssid,
-      security,
-      band,
-      iface=NET_IFACE,
-      password=None,
-      wep_key=None,
-      wep_key_num=1,
-      radius_server_ip=None,
-      radius_server_port=None,
-      radius_server_secret=None,
-      hidden=False,
-      ieee80211w=None):
-    self.name = name
-    self.ssid = ssid
-    self.security = security
-    self.band = band
-    self.iface = iface
-    self.password = password
-    self.wep_key = wep_key
-    self.wep_key_num = wep_key_num
-    self.radius_server_ip = radius_server_ip
-    self.radius_server_port = radius_server_port
-    self.radius_server_secret = radius_server_secret
-    self.hidden = hidden
-    self.ieee80211w = ieee80211w
-
+    def __init__(
+        self,
+        name,
+        ssid,
+        security,
+        band,
+        iface=NET_IFACE,
+        password=None,
+        wep_key=None,
+        wep_key_num=1,
+        radius_server_ip=None,
+        radius_server_port=None,
+        radius_server_secret=None,
+        hidden=False,
+        ieee80211w=None,
+    ):
+        self.name = name
+        self.ssid = ssid
+        self.security = security
+        self.band = band
+        self.iface = iface
+        self.password = password
+        self.wep_key = wep_key
+        self.wep_key_num = wep_key_num
+        self.radius_server_ip = radius_server_ip
+        self.radius_server_port = radius_server_port
+        self.radius_server_secret = radius_server_secret
+        self.hidden = hidden
+        self.ieee80211w = ieee80211w
diff --git a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
index ad36482..d899a30 100644
--- a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
+++ b/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
@@ -6,7 +6,6 @@
 from antlion.controllers.openwrt_lib.network_settings import SERVICE_DNSMASQ
 from antlion.controllers.openwrt_lib.network_settings import ServiceManager
 
-
 LEASE_FILE = "/tmp/dhcp.leases"
 OPEN_SECURITY = "none"
 PSK1_SECURITY = "psk"
@@ -24,131 +23,174 @@
 
 
 class WirelessSettingsApplier(object):
-  """Class for wireless settings.
+    """Class for wireless settings.
 
-  Attributes:
-    ssh: ssh object for the AP.
-    service_manager: Object manage service configuration
-    wireless_configs: a list of
-      antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
-    channel_2g: channel for 2G band.
-    channel_5g: channel for 5G band.
-  """
-
-  def __init__(self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G):
-    """Initialize wireless settings.
-
-    Args:
-      ssh: ssh connection object.
-      configs: a list of
+    Attributes:
+      ssh: ssh object for the AP.
+      service_manager: Object managing service configuration.
+      wireless_configs: a list of
         antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
       channel_2g: channel for 2G band.
       channel_5g: channel for 5G band.
     """
-    self.ssh = ssh
-    self.service_manager = ServiceManager(ssh)
-    self.wireless_configs = configs
-    self.channel_2g = channel_2g
-    self.channel_5g = channel_5g
-    self.radio_2g = radio_2g
-    self.radio_5g = radio_5g
 
-  def apply_wireless_settings(self):
-    """Configure wireless settings from a list of configs."""
-    default_2g_iface = "default_" + self.radio_2g
-    default_5g_iface = "default_" + self.radio_5g
+    def __init__(
+        self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G
+    ):
+        """Initialize wireless settings.
 
-    # set channels for 2G and 5G bands
-    self.ssh.run("uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g))
-    self.ssh.run("uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g))
-    if self.channel_5g == 165:
-      self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g)
-    elif self.channel_5g == 132 or self.channel_5g == 136:
-      self.ssh.run("iw reg set ZA")
-      self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g)
+        Args:
+          ssh: ssh connection object.
+          configs: a list of
+            antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
+          channel_2g: channel for 2G band.
+          channel_5g: channel for 5G band.
+        """
+        self.ssh = ssh
+        self.service_manager = ServiceManager(ssh)
+        self.wireless_configs = configs
+        self.channel_2g = channel_2g
+        self.channel_5g = channel_5g
+        self.radio_2g = radio_2g
+        self.radio_5g = radio_5g
 
-    if self.channel_2g == 13:
-      self.ssh.run("iw reg set AU")
+    def apply_wireless_settings(self):
+        """Configure wireless settings from a list of configs."""
+        default_2g_iface = "default_" + self.radio_2g
+        default_5g_iface = "default_" + self.radio_5g
 
-    # disable default OpenWrt SSID
-    self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                 (default_2g_iface, DISABLE_RADIO))
-    self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                 (default_5g_iface, DISABLE_RADIO))
+        # set channels for 2G and 5G bands
+        self.ssh.run(
+            "uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g)
+        )
+        self.ssh.run(
+            "uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g)
+        )
+        if self.channel_5g == 165:
+            self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g)
+        elif self.channel_5g == 132 or self.channel_5g == 136:
+            self.ssh.run("iw reg set ZA")
+            self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g)
 
-    # Enable radios
-    self.ssh.run("uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO))
-    self.ssh.run("uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO))
+        if self.channel_2g == 13:
+            self.ssh.run("iw reg set AU")
 
-    for config in self.wireless_configs:
+        # disable default OpenWrt SSID
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (default_2g_iface, DISABLE_RADIO)
+        )
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (default_5g_iface, DISABLE_RADIO)
+        )
 
-      # configure open network
-      if config.security == OPEN_SECURITY:
-        if config.band == hostapd_constants.BAND_2G:
-          self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                       (default_2g_iface, config.ssid))
-          self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                       (default_2g_iface, ENABLE_RADIO))
-          if config.hidden:
-            self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                         (default_2g_iface, ENABLE_HIDDEN))
-        elif config.band == hostapd_constants.BAND_5G:
-          self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                       (default_5g_iface, config.ssid))
-          self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                       (default_5g_iface, ENABLE_RADIO))
-          if config.hidden:
-            self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                         (default_5g_iface, ENABLE_HIDDEN))
-        continue
+        # Enable radios
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO)
+        )
+        self.ssh.run(
+            "uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO)
+        )
 
-      self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name)
-      if config.band == hostapd_constants.BAND_2G:
-        self.ssh.run("uci set wireless.%s.device='%s'" % (config.name, self.radio_2g))
-      else:
-        self.ssh.run("uci set wireless.%s.device='%s'" % (config.name, self.radio_5g))
-      self.ssh.run("uci set wireless.%s.network='%s'" %
-                   (config.name, config.iface))
-      self.ssh.run("uci set wireless.%s.mode='ap'" % config.name)
-      self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                   (config.name, config.ssid))
-      self.ssh.run("uci set wireless.%s.encryption='%s'" %
-                   (config.name, config.security))
-      if config.security == PSK_SECURITY or config.security == SAE_SECURITY\
-              or config.security == PSK1_SECURITY\
-              or config.security == SAEMIXED_SECURITY:
-        self.ssh.run("uci set wireless.%s.key='%s'" %
-                     (config.name, config.password))
-      elif config.security == WEP_SECURITY:
-        self.ssh.run("uci set wireless.%s.key%s='%s'" %
-                     (config.name, config.wep_key_num, config.wep_key))
-        self.ssh.run("uci set wireless.%s.key='%s'" %
-                     (config.name, config.wep_key_num))
-      elif config.security == ENT_SECURITY:
-        self.ssh.run("uci set wireless.%s.auth_secret='%s'" %
-                     (config.name, config.radius_server_secret))
-        self.ssh.run("uci set wireless.%s.auth_server='%s'" %
-                     (config.name, config.radius_server_ip))
-        self.ssh.run("uci set wireless.%s.auth_port='%s'" %
-                     (config.name, config.radius_server_port))
-      if config.ieee80211w:
-        self.ssh.run("uci set wireless.%s.ieee80211w='%s'" %
-                     (config.name, config.ieee80211w))
-      if config.hidden:
-        self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                     (config.name, ENABLE_HIDDEN))
+        for config in self.wireless_configs:
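+            # Open networks reuse the default OpenWrt wifi-iface for the band;
+            # every other security mode gets a dedicated wifi-iface section
+            # keyed by config.name.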
+            # configure open network
+            if config.security == OPEN_SECURITY:
+                if config.band == hostapd_constants.BAND_2G:
+                    self.ssh.run(
+                        "uci set wireless.%s.ssid='%s'"
+                        % (default_2g_iface, config.ssid)
+                    )
+                    self.ssh.run(
+                        "uci set wireless.%s.disabled='%s'"
+                        % (default_2g_iface, ENABLE_RADIO)
+                    )
+                    if config.hidden:
+                        self.ssh.run(
+                            "uci set wireless.%s.hidden='%s'"
+                            % (default_2g_iface, ENABLE_HIDDEN)
+                        )
+                elif config.band == hostapd_constants.BAND_5G:
+                    self.ssh.run(
+                        "uci set wireless.%s.ssid='%s'"
+                        % (default_5g_iface, config.ssid)
+                    )
+                    self.ssh.run(
+                        "uci set wireless.%s.disabled='%s'"
+                        % (default_5g_iface, ENABLE_RADIO)
+                    )
+                    if config.hidden:
+                        self.ssh.run(
+                            "uci set wireless.%s.hidden='%s'"
+                            % (default_5g_iface, ENABLE_HIDDEN)
+                        )
+                continue
 
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE))
+            self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name)
+            if config.band == hostapd_constants.BAND_2G:
+                self.ssh.run(
+                    "uci set wireless.%s.device='%s'" % (config.name, self.radio_2g)
+                )
+            else:
+                self.ssh.run(
+                    "uci set wireless.%s.device='%s'" % (config.name, self.radio_5g)
+                )
+            self.ssh.run(
+                "uci set wireless.%s.network='%s'" % (config.name, config.iface)
+            )
+            self.ssh.run("uci set wireless.%s.mode='ap'" % config.name)
+            self.ssh.run("uci set wireless.%s.ssid='%s'" % (config.name, config.ssid))
+            self.ssh.run(
+                "uci set wireless.%s.encryption='%s'" % (config.name, config.security)
+            )
+            if (
+                config.security == PSK_SECURITY
+                or config.security == SAE_SECURITY
+                or config.security == PSK1_SECURITY
+                or config.security == SAEMIXED_SECURITY
+            ):
+                self.ssh.run(
+                    "uci set wireless.%s.key='%s'" % (config.name, config.password)
+                )
+            elif config.security == WEP_SECURITY:
+                self.ssh.run(
+                    "uci set wireless.%s.key%s='%s'"
+                    % (config.name, config.wep_key_num, config.wep_key)
+                )
+                self.ssh.run(
+                    "uci set wireless.%s.key='%s'" % (config.name, config.wep_key_num)
+                )
+            elif config.security == ENT_SECURITY:
+                self.ssh.run(
+                    "uci set wireless.%s.auth_secret='%s'"
+                    % (config.name, config.radius_server_secret)
+                )
+                self.ssh.run(
+                    "uci set wireless.%s.auth_server='%s'"
+                    % (config.name, config.radius_server_ip)
+                )
+                self.ssh.run(
+                    "uci set wireless.%s.auth_port='%s'"
+                    % (config.name, config.radius_server_port)
+                )
+            if config.ieee80211w:
+                self.ssh.run(
+                    "uci set wireless.%s.ieee80211w='%s'"
+                    % (config.name, config.ieee80211w)
+                )
+            if config.hidden:
+                self.ssh.run(
+                    "uci set wireless.%s.hidden='%s'" % (config.name, ENABLE_HIDDEN)
+                )
 
-  def cleanup_wireless_settings(self):
-    """Reset wireless settings to default."""
-    self.ssh.run("wifi down")
-    self.ssh.run("rm -f /etc/config/wireless")
-    self.ssh.run("wifi config")
-    if self.channel_5g == 132:
-      self.ssh.run("iw reg set US")
-    self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE))
-    self.service_manager.restart(SERVICE_DNSMASQ)
-    time.sleep(9)
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE))
 
+    def cleanup_wireless_settings(self):
+        """Reset wireless settings to default."""
+        self.ssh.run("wifi down")
+        self.ssh.run("rm -f /etc/config/wireless")
+        self.ssh.run("wifi config")
+        if self.channel_5g == 132:
+            self.ssh.run("iw reg set US")
+        self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE))
+        self.service_manager.restart(SERVICE_DNSMASQ)
+        time.sleep(9)
diff --git a/src/antlion/controllers/packet_capture.py b/src/antlion/controllers/packet_capture.py
index 706f9c4..ce3d8fd 100755
--- a/src/antlion/controllers/packet_capture.py
+++ b/src/antlion/controllers/packet_capture.py
@@ -14,39 +14,36 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
+import threading
+import time
+
 from antlion import logger
-from antlion.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_2G
-from antlion.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_5G
-from antlion.controllers.ap_lib.hostapd_constants import CHANNEL_MAP
 from antlion.controllers.ap_lib.hostapd_constants import FREQUENCY_MAP
 from antlion.controllers.ap_lib.hostapd_constants import CENTER_CHANNEL_MAP
 from antlion.controllers.ap_lib.hostapd_constants import VHT_CHANNEL
 from antlion.controllers.utils_lib.ssh import connection
 from antlion.controllers.utils_lib.ssh import formatter
 from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.logging import log_stream
 from antlion.libs.proc.process import Process
-from antlion import asserts
 
-import os
-import threading
-import time
+from mobly import asserts
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'PacketCapture'
-ACTS_CONTROLLER_REFERENCE_NAME = 'packet_capture'
-BSS = 'BSS'
-BSSID = 'BSSID'
-FREQ = 'freq'
-FREQUENCY = 'frequency'
-LEVEL = 'level'
-MON_2G = 'mon0'
-MON_5G = 'mon1'
-BAND_IFACE = {'2G': MON_2G, '5G': MON_5G}
-SCAN_IFACE = 'wlan2'
+MOBLY_CONTROLLER_CONFIG_NAME = "PacketCapture"
+ACTS_CONTROLLER_REFERENCE_NAME = "packet_capture"
+BSS = "BSS"
+BSSID = "BSSID"
+FREQ = "freq"
+FREQUENCY = "frequency"
+LEVEL = "level"
+MON_2G = "mon0"
+MON_5G = "mon1"
+BAND_IFACE = {"2G": MON_2G, "5G": MON_5G}
+SCAN_IFACE = "wlan2"
 SCAN_TIMEOUT = 60
-SEP = ':'
-SIGNAL = 'signal'
-SSID = 'SSID'
+SEP = ":"
+SIGNAL = "signal"
+SSID = "SSID"
 
 
 def create(configs):
@@ -100,19 +97,21 @@
         Args:
             configs: config for the packet capture.
         """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
+        self.ssh_settings = settings.from_config(configs["ssh_config"])
         self.ssh = connection.SshConnection(self.ssh_settings)
-        self.log = logger.create_logger(lambda msg: '[%s|%s] %s' % (
-            MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg))
+        self.log = logger.create_logger(
+            lambda msg: "[%s|%s] %s"
+            % (MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg)
+        )
 
-        self._create_interface(MON_2G, 'monitor')
-        self._create_interface(MON_5G, 'monitor')
+        self._create_interface(MON_2G, "monitor")
+        self._create_interface(MON_5G, "monitor")
         self.managed_mode = True
-        result = self.ssh.run('ifconfig -a', ignore_status=True)
+        result = self.ssh.run("ifconfig -a", ignore_status=True)
         if result.stderr or SCAN_IFACE not in result.stdout:
             self.managed_mode = False
         if self.managed_mode:
-            self._create_interface(SCAN_IFACE, 'managed')
+            self._create_interface(SCAN_IFACE, "managed")
 
         self.pcap_properties = dict()
         self._pcap_stop_lock = threading.Lock()
@@ -122,26 +121,24 @@
 
         Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.
         """
-        if mode == 'monitor':
-            self.ssh.run('ifconfig wlan%s down' % iface[-1],
-                         ignore_status=True)
-        self.ssh.run('iw dev %s del' % iface, ignore_status=True)
-        self.ssh.run('iw phy%s interface add %s type %s' %
-                     (iface[-1], iface, mode),
-                     ignore_status=True)
-        self.ssh.run('ip link set %s up' % iface, ignore_status=True)
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
+        if mode == "monitor":
+            self.ssh.run("ifconfig wlan%s down" % iface[-1], ignore_status=True)
+        self.ssh.run("iw dev %s del" % iface, ignore_status=True)
+        self.ssh.run(
+            "iw phy%s interface add %s type %s" % (iface[-1], iface, mode),
+            ignore_status=True,
+        )
+        self.ssh.run("ip link set %s up" % iface, ignore_status=True)
+        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
         if result.stderr or iface not in result.stdout:
-            raise PacketCaptureError('Failed to configure interface %s' %
-                                     iface)
+            raise PacketCaptureError("Failed to configure interface %s" % iface)
 
     def _cleanup_interface(self, iface):
         """Clean up monitor mode interfaces."""
-        self.ssh.run('iw dev %s del' % iface, ignore_status=True)
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if not result.stderr or 'No such device' not in result.stderr:
-            raise PacketCaptureError('Failed to cleanup monitor mode for %s' %
-                                     iface)
+        self.ssh.run("iw dev %s del" % iface, ignore_status=True)
+        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
+        if not result.stderr or "No such device" not in result.stderr:
+            raise PacketCaptureError("Failed to cleanup monitor mode for %s" % iface)
 
     def _parse_scan_results(self, scan_result):
         """Parses the scan dump output and returns list of dictionaries.
@@ -163,7 +160,7 @@
             if SEP not in line:
                 continue
             if BSS in line:
-                network[BSSID] = line.split('(')[0].split()[-1]
+                network[BSSID] = line.split("(")[0].split()[-1]
             field, value = line.lstrip().rstrip().split(SEP)[0:2]
             value = value.lstrip()
             if SIGNAL in line:
@@ -183,10 +180,10 @@
             List of dictionaries each representing a found network.
         """
         if not self.managed_mode:
-            raise PacketCaptureError('Managed mode not setup')
-        result = self.ssh.run('iw dev %s scan' % SCAN_IFACE)
+            raise PacketCaptureError("Managed mode not setup")
+        result = self.ssh.run("iw dev %s scan" % SCAN_IFACE)
         if result.stderr:
-            raise PacketCaptureError('Failed to get scan dump')
+            raise PacketCaptureError("Failed to get scan dump")
         if not result.stdout:
             return []
         return self._parse_scan_results(result.stdout)
@@ -224,28 +221,29 @@
 
         band = band.upper()
         if band not in BAND_IFACE:
-            self.log.error('Invalid band. Must be 2g/2G or 5g/5G')
+            self.log.error("Invalid band. Must be 2g/2G or 5g/5G")
             return False
 
         iface = BAND_IFACE[band]
         if bandwidth == 20:
-            self.ssh.run('iw dev %s set channel %s' % (iface, channel),
-                         ignore_status=True)
+            self.ssh.run(
+                "iw dev %s set channel %s" % (iface, channel), ignore_status=True
+            )
         else:
             center_freq = None
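+            # For widths above 20 MHz, the center frequency is the midpoint of
+            # the first and last 20 MHz channels spanned by the VHT channel.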
             for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]:
                 if channel in range(i, j + 1):
                     center_freq = (FREQUENCY_MAP[i] + FREQUENCY_MAP[j]) / 2
                     break
-            asserts.assert_true(center_freq,
-                                "No match channel in VHT channel list.")
+            asserts.assert_true(center_freq, "No matching channel in VHT channel list.")
             self.ssh.run(
-                'iw dev %s set freq %s %s %s' %
-                (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
-                ignore_status=True)
+                "iw dev %s set freq %s %s %s"
+                % (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
+                ignore_status=True,
+            )
 
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if result.stderr or 'channel %s' % channel not in result.stdout:
+        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
+        if result.stderr or "channel %s" % channel not in result.stdout:
             self.log.error("Failed to configure monitor mode for %s" % band)
             return False
         return True
@@ -269,22 +267,19 @@
             self.log.error("Invalid band or packet capture already running")
             return None
 
-        pcap_name = '%s_%s.pcap' % (pcap_fname, band)
+        pcap_name = "%s_%s.pcap" % (pcap_fname, band)
         pcap_fname = os.path.join(log_path, pcap_name)
-        pcap_file = open(pcap_fname, 'w+b')
+        pcap_file = open(pcap_fname, "w+b")
 
-        tcpdump_cmd = 'tcpdump -i %s -w - -U 2>/dev/null' % (BAND_IFACE[band])
-        cmd = formatter.SshFormatter().format_command(tcpdump_cmd,
-                                                      None,
-                                                      self.ssh_settings,
-                                                      extra_flags={'-q': None})
+        tcpdump_cmd = "tcpdump -i %s -w - -U 2>/dev/null" % (BAND_IFACE[band])
+        cmd = formatter.SshFormatter().format_command(
+            tcpdump_cmd, None, self.ssh_settings, extra_flags={"-q": None}
+        )
         pcap_proc = Process(cmd)
-        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg),
-                                         binary=True)
+        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg), binary=True)
         pcap_proc.start()
 
-        self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname,
-                                                    pcap_file)
+        self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname, pcap_file)
         return pcap_proc
 
     def stop_packet_capture(self, proc):
diff --git a/src/antlion/controllers/packet_sender.py b/src/antlion/controllers/packet_sender.py
index ddd988c..da22e79 100644
--- a/src/antlion/controllers/packet_sender.py
+++ b/src/antlion/controllers/packet_sender.py
@@ -27,32 +27,32 @@
 # On ubuntu, sudo pip3 install scapy
 import scapy.all as scapy
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'PacketSender'
-ACTS_CONTROLLER_REFERENCE_NAME = 'packet_senders'
+MOBLY_CONTROLLER_CONFIG_NAME = "PacketSender"
+ACTS_CONTROLLER_REFERENCE_NAME = "packet_senders"
 
-GET_FROM_LOCAL_INTERFACE = 'get_local'
-MAC_BROADCAST = 'ff:ff:ff:ff:ff:ff'
-IPV4_BROADCAST = '255.255.255.255'
-ARP_DST = '00:00:00:00:00:00'
-RA_MAC = '33:33:00:00:00:01'
-RA_IP = 'ff02::1'
-RA_PREFIX = 'd00d::'
+GET_FROM_LOCAL_INTERFACE = "get_local"
+MAC_BROADCAST = "ff:ff:ff:ff:ff:ff"
+IPV4_BROADCAST = "255.255.255.255"
+ARP_DST = "00:00:00:00:00:00"
+RA_MAC = "33:33:00:00:00:01"
+RA_IP = "ff02::1"
+RA_PREFIX = "d00d::"
 RA_PREFIX_LEN = 64
 DHCP_OFFER_OP = 2
 DHCP_OFFER_SRC_PORT = 67
 DHCP_OFFER_DST_PORT = 68
 DHCP_TRANS_ID = 0x01020304
 DNS_LEN = 3
-PING6_DATA = 'BEST PING6 EVER'
+PING6_DATA = "BEST PING6 EVER"
 PING4_TYPE = 8
 MDNS_TTL = 255
-MDNS_QTYPE = 'PTR'
+MDNS_QTYPE = "PTR"
 MDNS_UDP_PORT = 5353
-MDNS_V4_IP_DST = '224.0.0.251'
-MDNS_V4_MAC_DST = '01:00:5E:00:00:FB'
+MDNS_V4_IP_DST = "224.0.0.251"
+MDNS_V4_MAC_DST = "01:00:5E:00:00:FB"
 MDNS_RECURSIVE = 1
-MDNS_V6_IP_DST = 'FF02::FB'
-MDNS_V6_MAC_DST = '33:33:00:00:00:FB'
+MDNS_V6_IP_DST = "FF02::FB"
+MDNS_V6_MAC_DST = "33:33:00:00:00:FB"
 ETH_TYPE_IP = 2048
 SAP_SPANNING_TREE = 0x42
 SNAP_OUI = 12
@@ -118,18 +118,18 @@
         self.log = log
 
     def run(self):
-        self.log.info('Packet Sending Started.')
+        self.log.info("Packet Sending Started.")
         while True:
             if self.stop_signal.is_set():
                 # Poison pill means shutdown
-                self.log.info('Packet Sending Stopped.')
+                self.log.info("Packet Sending Stopped.")
                 break
 
             try:
                 scapy.sendp(self.packet, iface=self.interface, verbose=0)
                 time.sleep(self.interval)
             except Exception:
-                self.log.exception('Exception when trying to send packet')
+                self.log.exception("Exception when trying to send packet")
                 return
 
         return
@@ -173,14 +173,15 @@
         """
         if packet is None:
             raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
+                "There is no packet to send. Create a packet first."
+            )
 
         for _ in range(ntimes):
             try:
                 scapy.sendp(packet, iface=self.interface, verbose=0)
                 time.sleep(interval)
             except socket.error as excpt:
-                self.log.exception('Caught socket exception : %s' % excpt)
+                self.log.exception("Caught socket exception : %s" % excpt)
                 return
 
     def send_receive_ntimes(self, packet, ntimes, interval):
@@ -194,15 +195,15 @@
         """
         if packet is None:
             raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
+                "There is no packet to send. Create a packet first."
+            )
 
         for _ in range(ntimes):
             try:
-                scapy.srp1(
-                    packet, iface=self.interface, timeout=interval, verbose=0)
+                scapy.srp1(packet, iface=self.interface, timeout=interval, verbose=0)
                 time.sleep(interval)
             except socket.error as excpt:
-                self.log.exception('Caught socket exception : %s' % excpt)
+                self.log.exception("Caught socket exception : %s" % excpt)
                 return
 
     def start_sending(self, packet, interval):
@@ -217,28 +218,32 @@
         """
         if packet is None:
             raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
+                "There is no packet to send. Create a packet first."
+            )
 
         if self.thread_active:
             raise PacketSenderError(
-                ('There is already an active thread. Stop it'
-                 'before starting another transmission.'))
+                (
+                    "There is already an active thread. Stop it"
+                    "before starting another transmission."
+                )
+            )
 
-        self.thread_send = ThreadSendPacket(self.stop_signal, packet, interval,
-                                            self.interface, self.log)
+        self.thread_send = ThreadSendPacket(
+            self.stop_signal, packet, interval, self.interface, self.log
+        )
         self.thread_send.start()
         self.thread_active = True
 
     def stop_sending(self, ignore_status=False):
-        """Stops the concurrent thread that is continuously sending packets.
-
-       """
+        """Stops the concurrent thread that is continuously sending packets."""
         if not self.thread_active:
             if ignore_status:
                 return
             else:
                 raise PacketSenderError(
-                    'Error: There is no acive thread running to stop.')
+                    "Error: There is no acive thread running to stop."
+                )
 
         # Stop thread
         self.stop_signal.set()
@@ -247,7 +252,7 @@
         # Just as precaution
         if self.thread_send.is_alive():
             self.thread_send.terminate()
-            self.log.warning('Packet Sending forced to terminate')
+            self.log.warning("Packet Sending forced to terminate")
 
         self.stop_signal.clear()
         self.thread_send = None
@@ -276,26 +281,28 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv4 = config_params["dst_ipv4"]
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
-    def generate(self,
-                 op='who-has',
-                 ip_dst=None,
-                 ip_src=None,
-                 hwsrc=None,
-                 hwdst=None,
-                 eth_dst=None):
+    def generate(
+        self,
+        op="who-has",
+        ip_dst=None,
+        ip_src=None,
+        hwsrc=None,
+        hwdst=None,
+        eth_dst=None,
+    ):
         """Generates a custom ARP packet.
 
         Args:
@@ -307,15 +314,14 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Create IP layer
-        hw_src = (hwsrc if hwsrc is not None else self.src_mac)
-        hw_dst = (hwdst if hwdst is not None else ARP_DST)
-        ipv4_dst = (ip_dst if ip_dst is not None else self.dst_ipv4)
-        ipv4_src = (ip_src if ip_src is not None else self.src_ipv4)
-        ip4 = scapy.ARP(
-            op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)
+        hw_src = hwsrc if hwsrc is not None else self.src_mac
+        hw_dst = hwdst if hwdst is not None else ARP_DST
+        ipv4_dst = ip_dst if ip_dst is not None else self.dst_ipv4
+        ipv4_src = ip_src if ip_src is not None else self.src_ipv4
+        ip4 = scapy.ARP(op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)
 
         # Create Ethernet layer
-        mac_dst = (eth_dst if eth_dst is not None else MAC_BROADCAST)
+        mac_dst = eth_dst if eth_dst is not None else MAC_BROADCAST
         ethernet = scapy.Ether(src=self.src_mac, dst=mac_dst)
 
         self.packet = ethernet / ip4
@@ -347,22 +353,22 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.subnet_mask = config_params['subnet_mask']
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.subnet_mask = config_params["subnet_mask"]
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv4 = config_params["dst_ipv4"]
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
-        self.gw_ipv4 = config_params['gw_ipv4']
+        self.gw_ipv4 = config_params["gw_ipv4"]
 
     def generate(self, cha_mac=None, dst_ip=None):
         """Generates a DHCP offer packet.
@@ -373,16 +379,18 @@
         """
 
         # Create DHCP layer
-        dhcp = scapy.DHCP(options=[
-            ('message-type', 'offer'),
-            ('subnet_mask', self.subnet_mask),
-            ('server_id', self.src_ipv4),
-            ('end'),
-        ])
+        dhcp = scapy.DHCP(
+            options=[
+                ("message-type", "offer"),
+                ("subnet_mask", self.subnet_mask),
+                ("server_id", self.src_ipv4),
+                ("end"),
+            ]
+        )
 
         # Overwrite standard DHCP fields
-        sta_hw = (cha_mac if cha_mac is not None else self.dst_mac)
-        sta_ip = (dst_ip if dst_ip is not None else self.dst_ipv4)
+        sta_hw = cha_mac if cha_mac is not None else self.dst_mac
+        sta_ip = dst_ip if dst_ip is not None else self.dst_ipv4
 
         # Create Boot
         bootp = scapy.BOOTP(
@@ -391,7 +399,8 @@
             siaddr=self.src_ipv4,
             giaddr=self.gw_ipv4,
             chaddr=scapy.mac2str(sta_hw),
-            xid=DHCP_TRANS_ID)
+            xid=DHCP_TRANS_ID,
+        )
 
         # Create UDP
         udp = scapy.UDP(sport=DHCP_OFFER_SRC_PORT, dport=DHCP_OFFER_DST_PORT)
@@ -429,19 +438,19 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv6 = config_params['dst_ipv6']
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv6 = config_params["dst_ipv6"]
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a Neighbor Solicitation (NS) packet (ICMP over IPv6).
@@ -456,8 +465,7 @@
         nnode_mcast = scapy.in6_getnsma(ndst_ip)
         node_mcast = socket.inet_ntop(socket.AF_INET6, nnode_mcast)
         # Compute MAC addresses
-        hw_dst = (eth_dst
-                  if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast))
+        hw_dst = eth_dst if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast)
 
         # Create IPv6 layer
         base = scapy.IPv6(dst=node_mcast, src=self.src_ipv6)
@@ -494,25 +502,22 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
-    def generate(self,
-                 lifetime,
-                 enableDNS=False,
-                 dns_lifetime=0,
-                 ip_dst=None,
-                 eth_dst=None):
+    def generate(
+        self, lifetime, enableDNS=False, dns_lifetime=0, ip_dst=None, eth_dst=None
+    ):
         """Generates a Router Advertisement (RA) packet (ICMP over IPv6).
 
         Args:
@@ -523,18 +528,18 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Overwrite standard fields if desired
-        ip6_dst = (ip_dst if ip_dst is not None else RA_IP)
-        hw_dst = (eth_dst if eth_dst is not None else RA_MAC)
+        ip6_dst = ip_dst if ip_dst is not None else RA_IP
+        hw_dst = eth_dst if eth_dst is not None else RA_MAC
 
         # Create IPv6 layer
         base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
         router_solicitation = scapy.ICMPv6ND_RA(routerlifetime=lifetime)
         src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
-        prefix = scapy.ICMPv6NDOptPrefixInfo(
-            prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
+        prefix = scapy.ICMPv6NDOptPrefixInfo(prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
         if enableDNS:
             rndss = scapy.ICMPv6NDOptRDNSS(
-                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN)
+                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN
+            )
             ip6 = base / router_solicitation / src_ll_addr / prefix / rndss
         else:
             ip6 = base / router_solicitation / src_ll_addr / prefix
@@ -570,20 +575,20 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv6 = config_params['dst_ipv6']
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv6 = config_params["dst_ipv6"]
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a Ping6 packet (i.e., Echo Request)
@@ -593,8 +598,8 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Overwrite standard fields if desired
-        ip6_dst = (ip_dst if ip_dst is not None else self.dst_ipv6)
-        hw_dst = (eth_dst if eth_dst is not None else self.dst_mac)
+        ip6_dst = ip_dst if ip_dst is not None else self.dst_ipv6
+        hw_dst = eth_dst if eth_dst is not None else self.dst_mac
 
         # Create IPv6 layer
         base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
@@ -632,19 +637,19 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_ipv4 = config_params["dst_ipv4"]
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a Ping4 packet (i.e., Echo Request)
@@ -655,8 +660,8 @@
         """
 
         # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else self.dst_ipv4)
-        sta_hw = (eth_dst if eth_dst is not None else self.dst_mac)
+        sta_ip = ip_dst if ip_dst is not None else self.dst_ipv4
+        sta_hw = eth_dst if eth_dst is not None else self.dst_mac
 
         # Create IPv6 layer
         base = scapy.IP(src=self.src_ipv4, dst=sta_ip)
@@ -693,18 +698,18 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
+        self.src_ipv6_type = config_params["src_ipv6_type"]
+        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
         else:
-            self.src_ipv6 = config_params['src_ipv6']
+            self.src_ipv6 = config_params["src_ipv6"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a mDNS v6 packet for multicast DNS config
@@ -715,8 +720,8 @@
         """
 
         # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else MDNS_V6_IP_DST)
-        sta_hw = (eth_dst if eth_dst is not None else MDNS_V6_MAC_DST)
+        sta_ip = ip_dst if ip_dst is not None else MDNS_V6_IP_DST
+        sta_hw = eth_dst if eth_dst is not None else MDNS_V6_MAC_DST
 
         # Create mDNS layer
         qdServer = scapy.DNSQR(qname=self.src_ipv6, qtype=MDNS_QTYPE)
@@ -756,17 +761,17 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
+        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
             self.src_ipv4 = scapy.get_if_addr(interf)
         else:
-            self.src_ipv4 = config_params['src_ipv4']
+            self.src_ipv4 = config_params["src_ipv4"]
 
     def generate(self, ip_dst=None, eth_dst=None):
         """Generates a mDNS v4 packet for multicast DNS config
@@ -777,8 +782,8 @@
         """
 
         # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else MDNS_V4_IP_DST)
-        sta_hw = (eth_dst if eth_dst is not None else MDNS_V4_MAC_DST)
+        sta_ip = ip_dst if ip_dst is not None else MDNS_V4_IP_DST
+        sta_hw = eth_dst if eth_dst is not None else MDNS_V4_MAC_DST
 
         # Create mDNS layer
         qdServer = scapy.DNSQR(qname=self.src_ipv4, qtype=MDNS_QTYPE)
@@ -818,13 +823,13 @@
               The key can also be 'get_local' which means the code will read
               and use the local interface parameters
         """
-        interf = config_params['interf']
+        interf = config_params["interf"]
         self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
+        self.dst_mac = config_params["dst_mac"]
+        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
             self.src_mac = scapy.get_if_hwaddr(interf)
         else:
-            self.src_mac = config_params['src_mac']
+            self.src_mac = config_params["src_mac"]
 
     def _build_ether(self, eth_dst=None):
         """Creates the basic frame for 802.3
@@ -833,7 +838,7 @@
             eth_dst: Ethernet (layer 2) destination address (Optional)
         """
         # Overwrite standard fields if desired
-        sta_hw = (eth_dst if eth_dst is not None else self.dst_mac)
+        sta_hw = eth_dst if eth_dst is not None else self.dst_mac
         # Create Ethernet layer
         dot3_base = scapy.Dot3(src=self.src_mac, dst=sta_hw)
 
@@ -847,7 +852,7 @@
         """
         frame.len = PAD_LEN_BYTES
         pad = scapy.Padding()
-        pad.load = '\x00' * PAD_LEN_BYTES
+        pad.load = "\x00" * PAD_LEN_BYTES
         return frame / pad
 
     def generate(self, eth_dst=None):
@@ -881,13 +886,15 @@
         self.packet = self._pad_frame(ethernet / llc)
         return self.packet
 
-    def generate_snap(self,
-                      eth_dst=None,
-                      dsap=SNAP_DSAP,
-                      ssap=SNAP_SSAP,
-                      ctrl=SNAP_CTRL,
-                      oui=SNAP_OUI,
-                      code=ETH_TYPE_IP):
+    def generate_snap(
+        self,
+        eth_dst=None,
+        dsap=SNAP_DSAP,
+        ssap=SNAP_SSAP,
+        ctrl=SNAP_CTRL,
+        oui=SNAP_OUI,
+        code=ETH_TYPE_IP,
+    ):
         """Generates the 802.3 frame with LLC and SNAP and adds padding
 
         Args:
@@ -932,4 +939,3 @@
             return if_list[0]
 
     return None
-
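For orientation, the reformatted generator/sender classes above follow a common flow: build a config dict keyed by 'interf', 'src_mac', and the relevant address fields, call generate() to produce a scapy frame, then hand the frame to the sender's start_sending()/stop_sending() thread. The sketch below illustrates that flow under assumptions: the ArpGenerator and PacketSender class names and the antlion.controllers.packet_sender module path are inferred from the methods shown in this diff, not confirmed by it.

    # Hypothetical usage sketch; class names and module path are assumptions.
    from antlion.controllers.packet_sender import ArpGenerator, PacketSender

    config_params = {
        "interf": "eth0",
        "src_mac": "get_local",    # GET_FROM_LOCAL_INTERFACE: read MAC from eth0
        "src_ipv4": "get_local",   # likewise for the source IPv4 address
        "dst_ipv4": "192.168.1.1",
    }

    generator = ArpGenerator(config_params)
    packet = generator.generate()              # default op="who-has"

    sender = PacketSender("eth0")
    sender.start_sending(packet, interval=1)   # spawns the ThreadSendPacket worker
    # ... run the test scenario ...
    sender.stop_sending()                      # raises PacketSenderError if no thread is active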
diff --git a/src/antlion/controllers/pdu.py b/src/antlion/controllers/pdu.py
index d167afa..412742e 100644
--- a/src/antlion/controllers/pdu.py
+++ b/src/antlion/controllers/pdu.py
@@ -19,8 +19,8 @@
 
 from antlion import tracelogger
 
-MOBLY_CONTROLLER_CONFIG_NAME = 'PduDevice'
-ACTS_CONTROLLER_REFERENCE_NAME = 'pdu_devices'
+MOBLY_CONTROLLER_CONFIG_NAME = "PduDevice"
+ACTS_CONTROLLER_REFERENCE_NAME = "pdu_devices"
 
 
 def create(configs):
@@ -40,15 +40,15 @@
     if configs:
         pdus = []
         for config in configs:
-            device = config.get('device')
+            device = config.get("device")
             if not device:
                 raise PduError("Config must provide a device")
 
-            host = config.get('host')
+            host = config.get("host")
             if not device:
                 raise PduError("Config must provide a host ip address")
-            username = config.get('username')
-            password = config.get('password')
+            username = config.get("username")
+            password = config.get("password")
             pdu = _create_device(device, host, username, password)
             pdus.append(pdu)
         return pdus
@@ -77,11 +77,9 @@
     """
     info = []
     for pdu in pdu_list:
-        info.append({
-            'host': pdu.host,
-            'username': pdu.username,
-            'password': pdu.password
-        })
+        info.append(
+            {"host": pdu.host, "username": pdu.username, "password": pdu.password}
+        )
     return info
 
 
@@ -89,7 +87,7 @@
     """Factory method that returns an instance of PduDevice implementation
     based on the device string.
     """
-    module_name = 'antlion.controllers.pdu_lib.' + device
+    module_name = "antlion.controllers.pdu_lib." + device
     module = importlib.import_module(module_name)
     return module.PduDevice(host, username, password)
 
@@ -143,12 +141,12 @@
         ],
         ...
     """
-    pdu_ip = device_pdu_config['host']
-    port = device_pdu_config['port']
+    pdu_ip = device_pdu_config["host"]
+    port = device_pdu_config["port"]
     for pdu in pdus:
         if pdu.host == pdu_ip:
             return pdu, port
-    raise ValueError('No PduDevice with host: %s' % pdu_ip)
+    raise ValueError("No PduDevice with host: %s" % pdu_ip)
 
 
 class PduDevice(object):
@@ -163,8 +161,7 @@
 
     def __init__(self, host, username, password):
         if type(self) is PduDevice:
-            raise NotImplementedError(
-                "Base class: cannot be instantiated directly")
+            raise NotImplementedError("Base class: cannot be instantiated directly")
         self.host = host
         self.username = username
         self.password = password
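As a usage note, the PduDevice factory above expects each config entry to carry the 'device', 'host', 'username', and 'password' keys, with 'device' naming a module under antlion.controllers.pdu_lib. A hedged sketch of how a testbed might wire that up follows; the device string, address, and credentials are placeholders, and the name of the outlet-lookup helper is an assumption based on its docstring in this diff.

    # Illustrative config; device string, address, and credentials are placeholders.
    from antlion.controllers import pdu

    pdu_configs = [
        {
            "device": "synaccess.np02b",   # resolved to antlion.controllers.pdu_lib.synaccess.np02b
            "host": "192.168.42.10",
            "username": "admin",
            "password": "admin",
        }
    ]

    pdus = pdu.create(pdu_configs)         # list of PduDevice implementations

    # Map a DUT's PDU reference ({'host': ..., 'port': ...}) to its (PduDevice, port) pair.
    # The helper name below is an assumption based on the docstring in this diff.
    device_pdu_config = {"host": "192.168.42.10", "port": "1"}
    my_pdu, port = pdu.get_pdu_port_for_device(device_pdu_config, pdus)
    my_pdu.reboot(port)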
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
index 8a4c6d4..1154f95 100644
--- a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
+++ b/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
@@ -24,6 +24,7 @@
 # TODO(b/246999212): Explore alternatives to the dlipower package
 try:
     import dlipower
+
     HAS_IMPORT_DLIPOWER = True
 except ImportError:
     HAS_IMPORT_DLIPOWER = False
@@ -51,32 +52,34 @@
 
         if not HAS_IMPORT_DLIPOWER:
             raise signals.ControllerError(
-                'Digital Loggers PDUs are not supported with current installed '
-                'packages; install the dlipower package to add support')
+                "Digital Loggers PDUs are not supported with current installed "
+                "packages; install the dlipower package to add support"
+            )
 
-        self.power_switch = dlipower.PowerSwitch(hostname=host,
-                                                 userid=username,
-                                                 password=password)
+        self.power_switch = dlipower.PowerSwitch(
+            hostname=host, userid=username, password=password
+        )
         # Connection is made at command execution, this verifies the device
         # can be reached before continuing.
         if not self.power_switch.statuslist():
             raise pdu.PduError(
-                'Failed to connect get WebPowerSwitch status. Incorrect host, '
-                'userid, or password?')
+                "Failed to connect get WebPowerSwitch status. Incorrect host, "
+                "userid, or password?"
+            )
         else:
-            self.log.info('Connected to WebPowerSwitch (%s).' % host)
+            self.log.info("Connected to WebPowerSwitch (%s)." % host)
 
     def on_all(self):
         """Turn on power to all outlets."""
         for outlet in self.power_switch:
             outlet.on()
-            self._verify_state(outlet.name, 'ON')
+            self._verify_state(outlet.name, "ON")
 
     def off_all(self):
         """Turn off power to all outlets."""
         for outlet in self.power_switch:
             outlet.off()
-            self._verify_state(outlet.name, 'OFF')
+            self._verify_state(outlet.name, "OFF")
 
     def on(self, outlet):
         """Turn on power to given outlet
@@ -84,8 +87,8 @@
         Args:
             outlet: string or int, the outlet name/number
         """
-        self.power_switch.command_on_outlets('on', str(outlet))
-        self._verify_state(outlet, 'ON')
+        self.power_switch.command_on_outlets("on", str(outlet))
+        self._verify_state(outlet, "ON")
 
     def off(self, outlet):
         """Turn off power to given outlet
@@ -93,8 +96,8 @@
         Args:
             outlet: string or int, the outlet name/number
         """
-        self.power_switch.command_on_outlets('off', str(outlet))
-        self._verify_state(outlet, 'OFF')
+        self.power_switch.command_on_outlets("off", str(outlet))
+        self._verify_state(outlet, "OFF")
 
     def reboot(self, outlet):
         """Cycle the given outlet to OFF and back ON.
@@ -102,11 +105,11 @@
         Args:
             outlet: string or int, the outlet name/number
         """
-        self.power_switch.command_on_outlets('cycle', str(outlet))
-        self._verify_state(outlet, 'ON')
+        self.power_switch.command_on_outlets("cycle", str(outlet))
+        self._verify_state(outlet, "ON")
 
     def status(self):
-        """ Return the status of the switch outlets.
+        """Return the status of the switch outlets.
 
         Return:
             a dict mapping outlet string numbers to:
@@ -114,7 +117,7 @@
                 False if outlet is OFF
         """
         status_list = self.power_switch.statuslist()
-        return {str(outlet): state == 'ON' for outlet, _, state in status_list}
+        return {str(outlet): state == "ON" for outlet, _, state in status_list}
 
     def close(self):
         # Since there isn't a long-running connection, close is not applicable.
@@ -141,10 +144,11 @@
             if actual_state == expected_state:
                 return
             else:
-                self.log.debug('Outlet %s not yet in state %s' %
-                               (outlet, expected_state))
+                self.log.debug(
+                    "Outlet %s not yet in state %s" % (outlet, expected_state)
+                )
         raise pdu.PduError(
-            'Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n'
-            'Expected State: %s\n'
-            'Actual State: %s' %
-            (outlet, self.host, expected_state, actual_state))
+            "Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n"
+            "Expected State: %s\n"
+            "Actual State: %s" % (outlet, self.host, expected_state, actual_state)
+        )
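For completeness, a short hedged example of driving the WebPowerSwitch wrapper directly; host and credentials are placeholders, and the optional dlipower dependency noted above must be installed or the constructor raises signals.ControllerError.

    # Placeholder host/credentials; requires the optional dlipower package.
    from antlion.controllers.pdu_lib.digital_loggers.webpowerswitch import PduDevice

    switch = PduDevice(host="192.168.1.100", username="admin", password="1234")

    switch.on("1")              # _verify_state polls until the outlet reports "ON"
    switch.reboot("2")          # cycle OFF then back ON
    outlets = switch.status()   # e.g. {"1": True, "2": True, "3": False, ...}
    if not outlets.get("3", False):
        switch.on("3")
    switch.off_all()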
diff --git a/src/antlion/controllers/pdu_lib/synaccess/np02b.py b/src/antlion/controllers/pdu_lib/synaccess/np02b.py
index ccc7e74..70624f0 100644
--- a/src/antlion/controllers/pdu_lib/synaccess/np02b.py
+++ b/src/antlion/controllers/pdu_lib/synaccess/np02b.py
@@ -26,40 +26,41 @@
     """Implementation of pure abstract PduDevice object for the Synaccess np02b
     Pdu.
     """
+
     def __init__(self, host, username, password):
         super(PduDevice, self).__init__(host, username, password)
         self.tnhelper = _TNHelperNP02B(host)
 
     def on_all(self):
-        """ Turns on both outlets on the np02b."""
-        self.tnhelper.cmd('ps 1')
-        self._verify_state({'1': True, '2': True})
+        """Turns on both outlets on the np02b."""
+        self.tnhelper.cmd("ps 1")
+        self._verify_state({"1": True, "2": True})
 
     def off_all(self):
-        """ Turns off both outlets on the np02b."""
-        self.tnhelper.cmd('ps 0')
-        self._verify_state({'1': False, '2': False})
+        """Turns off both outlets on the np02b."""
+        self.tnhelper.cmd("ps 0")
+        self._verify_state({"1": False, "2": False})
 
     def on(self, outlet):
-        """ Turns on specific outlet on the np02b.
+        """Turns on specific outlet on the np02b.
 
         Args:
             outlet: string of the outlet to turn on ('1' or '2')
         """
-        self.tnhelper.cmd('pset %s 1' % outlet)
+        self.tnhelper.cmd("pset %s 1" % outlet)
         self._verify_state({outlet: True})
 
     def off(self, outlet):
-        """ Turns off a specifc outlet on the np02b.
+        """Turns off a specifc outlet on the np02b.
 
         Args:
             outlet: string of the outlet to turn off ('1' or '2')
         """
-        self.tnhelper.cmd('pset %s 0' % outlet)
+        self.tnhelper.cmd("pset %s 0" % outlet)
         self._verify_state({outlet: False})
 
     def reboot(self, outlet):
-        """ Toggles a specific outlet on the np02b to off, then to on.
+        """Toggles a specific outlet on the np02b to off, then to on.
 
         Args:
             outlet: string of the outlet to reboot ('1' or '2')
@@ -70,18 +71,18 @@
         self._verify_state({outlet: True})
 
     def status(self):
-        """ Returns the status of the np02b outlets.
+        """Returns the status of the np02b outlets.
 
         Return:
             a dict mapping outlet strings ('1' and '2') to:
                 True if outlet is ON
                 False if outlet is OFF
         """
-        res = self.tnhelper.cmd('pshow')
-        status_list = re.findall('(ON|OFF)', res)
+        res = self.tnhelper.cmd("pshow")
+        status_list = re.findall("(ON|OFF)", res)
         status_dict = {}
         for i, status in enumerate(status_list):
-            status_dict[str(i + 1)] = (status == 'ON')
+            status_dict[str(i + 1)] = status == "ON"
         return status_dict
 
     def close(self):
@@ -118,10 +119,12 @@
             actual_state = self.status()
             if expected_state.items() <= actual_state.items():
                 return True
-            time.sleep(.1)
-        raise pdu.PduError('Timeout while verifying state.\n'
-                           'Expected State: %s\n'
-                           'Actual State: %s' % (expected_state, actual_state))
+            time.sleep(0.1)
+        raise pdu.PduError(
+            "Timeout while verifying state.\n"
+            "Expected State: %s\n"
+            "Actual State: %s" % (expected_state, actual_state)
+        )
 
 
 class _TNHelperNP02B(object):
@@ -129,12 +132,13 @@
     helper is specific to the idiosyncrasies of the NP02B and therefore should
     not be used with other devices.
     """
+
     def __init__(self, host):
         self._tn = telnetlib.Telnet()
         self.host = host
-        self.tx_cmd_separator = '\n\r'
-        self.rx_cmd_separator = '\r\n'
-        self.prompt = '>'
+        self.tx_cmd_separator = "\n\r"
+        self.rx_cmd_separator = "\r\n"
+        self.prompt = ">"
 
     """
     Executes a command on the device via telnet.
@@ -149,14 +153,13 @@
         try:
             self._tn.open(self.host, timeout=3)
         except:
-            raise pdu.PduError("Failed to open telnet session to host (%s)" %
-                               self.host)
-        time.sleep(.1)
+            raise pdu.PduError("Failed to open telnet session to host (%s)" % self.host)
+        time.sleep(0.1)
 
         # Read to end of first prompt
         cmd_str.strip(self.tx_cmd_separator)
         self._tn.read_eager()
-        time.sleep(.1)
+        time.sleep(0.1)
 
         # Write command and read all output text
         self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
@@ -166,12 +169,12 @@
         if res is None:
             raise pdu.PduError("Command failed: %s" % cmd_str)
         res = res.decode()
-        if re.search('Invalid', res):
+        if re.search("Invalid", res):
             raise pdu.PduError("Command Invalid: %s" % cmd_str)
-        res = res.replace(self.prompt, '')
-        res = res.replace(self.tx_cmd_separator, '')
-        res = res.replace(self.rx_cmd_separator, '')
-        res = res.replace(cmd_str, '')
+        res = res.replace(self.prompt, "")
+        res = res.replace(self.tx_cmd_separator, "")
+        res = res.replace(self.rx_cmd_separator, "")
+        res = res.replace(cmd_str, "")
 
         # Close session
         self._tn.close()
diff --git a/src/antlion/controllers/power_metrics.py b/src/antlion/controllers/power_metrics.py
deleted file mode 100644
index 0c306ce..0000000
--- a/src/antlion/controllers/power_metrics.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import numpy as np
-
-# Metrics timestamp keys
-START_TIMESTAMP = 'start'
-END_TIMESTAMP = 'end'
-
-# Unit type constants
-CURRENT = 'current'
-POWER = 'power'
-TIME = 'time'
-VOLTAGE = 'voltage'
-
-# Unit constants
-MILLIVOLT = 'mV'
-VOLT = 'V'
-MILLIAMP = 'mA'
-AMP = 'A'
-AMPERE = AMP
-MILLIWATT = 'mW'
-WATT = 'W'
-MILLISECOND = 'ms'
-SECOND = 's'
-MINUTE = 'm'
-HOUR = 'h'
-
-CONVERSION_TABLES = {
-    CURRENT: {
-        MILLIAMP: 0.001,
-        AMP: 1
-    },
-    POWER: {
-        MILLIWATT: 0.001,
-        WATT: 1
-    },
-    TIME: {
-        MILLISECOND: 0.001,
-        SECOND: 1,
-        MINUTE: 60,
-        HOUR: 3600
-    },
-    VOLTAGE: {
-        MILLIVOLT: 0.001,
-        VOLT : 1
-    }
-}
-
-
-class Metric(object):
-    """Base class for describing power measurement values. Each object contains
-    an value and a unit. Enables some basic arithmetic operations with other
-    measurements of the same unit type.
-
-    Attributes:
-        value: Numeric value of the measurement
-        _unit_type: Unit type of the measurement (e.g. current, power)
-        unit: Unit of the measurement (e.g. W, mA)
-    """
-
-    def __init__(self, value, unit_type, unit, name=None):
-        if unit_type not in CONVERSION_TABLES:
-            raise TypeError(
-                '%s is not a valid unit type, valid unit types are %s' % (
-                    unit_type, str(CONVERSION_TABLES.keys)))
-        self.value = value
-        self.unit = unit
-        self.name = name
-        self._unit_type = unit_type
-
-    # Convenience constructor methods
-    @staticmethod
-    def amps(amps, name=None):
-        """Create a new current measurement, in amps."""
-        return Metric(amps, CURRENT, AMP, name=name)
-
-    @staticmethod
-    def watts(watts, name=None):
-        """Create a new power measurement, in watts."""
-        return Metric(watts, POWER, WATT, name=name)
-
-    @staticmethod
-    def seconds(seconds, name=None):
-        """Create a new time measurement, in seconds."""
-        return Metric(seconds, TIME, SECOND, name=name)
-
-    # Comparison methods
-
-    def __eq__(self, other):
-        return self.value == other.to_unit(self.unit).value
-
-    def __lt__(self, other):
-        return self.value < other.to_unit(self.unit).value
-
-    def __le__(self, other):
-        return self == other or self < other
-
-    # Addition and subtraction with other measurements
-
-    def __add__(self, other):
-        """Adds measurements of compatible unit types. The result will be in the
-        same units as self.
-        """
-        return Metric(self.value + other.to_unit(self.unit).value,
-                      self._unit_type, self.unit, name=self.name)
-
-    def __sub__(self, other):
-        """Subtracts measurements of compatible unit types. The result will be
-        in the same units as self.
-        """
-        return Metric(self.value - other.to_unit(self.unit).value,
-                      self._unit_type, self.unit, name=self.name)
-
-    # String representation
-
-    def __str__(self):
-        return '%g%s' % (self.value, self.unit)
-
-    def __repr__(self):
-        return str(self)
-
-    def to_unit(self, new_unit):
-        """Create an equivalent measurement under a different unit.
-        e.g. 0.5W -> 500mW
-
-        Args:
-            new_unit: Target unit. Must be compatible with current unit.
-
-        Returns: A new measurement with the converted value and unit.
-        """
-        try:
-            new_value = self.value * (
-                CONVERSION_TABLES[self._unit_type][self.unit] /
-                CONVERSION_TABLES[self._unit_type][new_unit])
-        except KeyError:
-            raise TypeError('Incompatible units: %s, %s' %
-                            (self.unit, new_unit))
-        return Metric(new_value, self._unit_type, new_unit, self.name)
-
-
-def import_raw_data(path):
-    """Create a generator from a Monsoon data file.
-
-    Args:
-        path: path to raw data file
-
-    Returns: generator that yields (timestamp, sample) per line
-    """
-    with open(path, 'r') as f:
-        for line in f:
-            time, sample = line.split()
-            yield float(time[:-1]), float(sample)
-
-
-def generate_percentiles(monsoon_file, timestamps, percentiles):
-    """Generates metrics .
-
-    Args:
-        monsoon_file: monsoon-like file where each line has two
-            numbers separated by a space, in the format:
-            seconds_since_epoch amperes
-            seconds_since_epoch amperes
-        timestamps: dict following the output format of
-            instrumentation_proto_parser.get_test_timestamps()
-        percentiles: percentiles to be returned
-    """
-    if timestamps is None:
-        timestamps = {}
-    test_starts = {}
-    test_ends = {}
-    for seg_name, times in timestamps.items():
-        if START_TIMESTAMP in times and END_TIMESTAMP in times:
-            test_starts[seg_name] = Metric(
-                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-            test_ends[seg_name] = Metric(
-                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-
-    arrays = {}
-    for seg_name in test_starts:
-        arrays[seg_name] = []
-
-    with open(monsoon_file, 'r') as m:
-        for line in m:
-            timestamp = float(line.strip().split()[0])
-            value = float(line.strip().split()[1])
-            for seg_name in arrays.keys():
-                if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
-                    arrays[seg_name].append(value)
-
-    results = {}
-    for seg_name in arrays:
-        if len(arrays[seg_name]) == 0:
-            continue
-
-        pairs = zip(percentiles, np.percentile(arrays[seg_name],
-                                               percentiles))
-        results[seg_name] = [
-            Metric.amps(p[1], 'percentile_%s' % p[0]).to_unit(MILLIAMP) for p in
-            pairs
-        ]
-    return results
-
-
-def generate_test_metrics(raw_data, timestamps=None,
-                          voltage=None):
-    """Split the data into individual test metrics, based on the timestamps
-    given as a dict.
-
-    Args:
-        raw_data: raw data as list or generator of (timestamp, sample)
-        timestamps: dict following the output format of
-            instrumentation_proto_parser.get_test_timestamps()
-        voltage: voltage used during measurements
-    """
-
-    # Initialize metrics for each test
-    if timestamps is None:
-        timestamps = {}
-    test_starts = {}
-    test_ends = {}
-    test_metrics = {}
-    for seg_name, times in timestamps.items():
-        if START_TIMESTAMP in times and END_TIMESTAMP in times:
-            test_metrics[seg_name] = PowerMetrics(voltage)
-            test_starts[seg_name] = Metric(
-                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-            test_ends[seg_name] = Metric(
-                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-
-    # Assign data to tests based on timestamps
-    for timestamp, amps in raw_data:
-        for seg_name in test_metrics.keys():
-            if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
-                test_metrics[seg_name].update_metrics(amps)
-
-    result = {}
-    for seg_name, power_metrics in test_metrics.items():
-        result[seg_name] = [
-            power_metrics.avg_current,
-            power_metrics.max_current,
-            power_metrics.min_current,
-            power_metrics.stdev_current,
-            power_metrics.avg_power]
-    return result
-
-
-class PowerMetrics(object):
-    """Class for processing raw power metrics generated by Monsoon measurements.
-    Provides useful metrics such as average current, max current, and average
-    power. Can generate individual test metrics.
-
-    See section "Numeric metrics" below for available metrics.
-    """
-
-    def __init__(self, voltage):
-        """Create a PowerMetrics.
-
-        Args:
-            voltage: Voltage of the measurement
-        """
-        self._voltage = voltage
-        self._num_samples = 0
-        self._sum_currents = 0
-        self._sum_squares = 0
-        self._max_current = None
-        self._min_current = None
-        self.test_metrics = {}
-
-    def update_metrics(self, sample):
-        """Update the running metrics with the current sample.
-
-        Args:
-            sample: A current sample in Amps.
-        """
-        self._num_samples += 1
-        self._sum_currents += sample
-        self._sum_squares += sample ** 2
-        if self._max_current is None or sample > self._max_current:
-            self._max_current = sample
-        if self._min_current is None or sample < self._min_current:
-            self._min_current = sample
-
-    # Numeric metrics
-    @property
-    def avg_current(self):
-        """Average current, in milliamps."""
-        if not self._num_samples:
-            return Metric.amps(0).to_unit(MILLIAMP)
-        return (Metric.amps(self._sum_currents / self._num_samples,
-                            'avg_current')
-                .to_unit(MILLIAMP))
-
-    @property
-    def max_current(self):
-        """Max current, in milliamps."""
-        return Metric.amps(self._max_current or 0, 'max_current').to_unit(
-            MILLIAMP)
-
-    @property
-    def min_current(self):
-        """Min current, in milliamps."""
-        return Metric.amps(self._min_current or 0, 'min_current').to_unit(
-            MILLIAMP)
-
-    @property
-    def stdev_current(self):
-        """Standard deviation of current values, in milliamps."""
-        if self._num_samples < 2:
-            return Metric.amps(0, 'stdev_current').to_unit(MILLIAMP)
-        stdev = math.sqrt(
-            (self._sum_squares - (
-                self._num_samples * self.avg_current.to_unit(AMP).value ** 2))
-            / (self._num_samples - 1))
-        return Metric.amps(stdev, 'stdev_current').to_unit(MILLIAMP)
-
-    @property
-    def avg_power(self):
-        """Average power, in milliwatts."""
-        return Metric.watts(self.avg_current.to_unit(AMP).value * self._voltage,
-                            'avg_power').to_unit(MILLIWATT)
diff --git a/src/antlion/controllers/power_monitor.py b/src/antlion/controllers/power_monitor.py
deleted file mode 100644
index 6a229f7..0000000
--- a/src/antlion/controllers/power_monitor.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import tempfile
-
-from antlion.controllers import power_metrics
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-
-
-class ResourcesRegistryError(Exception):
-    pass
-
-
-_REGISTRY = {}
-
-
-def update_registry(registry):
-    """Updates the registry with the one passed.
-
-    Overriding a previous value is not allowed.
-
-    Args:
-        registry: A dictionary.
-    Raises:
-        ResourceRegistryError if a property is updated with a different value.
-    """
-    for k, v in registry.items():
-        if k in _REGISTRY:
-            if v == _REGISTRY[k]:
-                continue
-            raise ResourcesRegistryError(
-                'Overwriting resources_registry fields is not allowed. %s was '
-                'already defined as %s and was attempted to be overwritten '
-                'with %s.' % (k, _REGISTRY[k], v))
-        _REGISTRY[k] = v
-
-
-def get_registry():
-    return _REGISTRY
-
-
-def _write_raw_data_in_standard_format(raw_data, path, start_time):
-    """Writes the raw data to a file in (seconds since epoch, amps).
-
-    TODO(b/155294049): Deprecate this once Monsoon controller output
-        format is updated.
-
-    Args:
-        start_time: Measurement start time in seconds since epoch
-        raw_data: raw data as list or generator of (timestamp, sample)
-        path: path to write output
-    """
-    with open(path, 'w') as f:
-        for timestamp, amps in raw_data:
-            f.write('%s %s\n' %
-                    (timestamp + start_time, amps))
-
-
-class BasePowerMonitor(object):
-
-    def setup(self, **kwargs):
-        raise NotImplementedError()
-
-    def connect_usb(self, **kwargs):
-        raise NotImplementedError()
-
-    def measure(self, **kwargs):
-        raise NotImplementedError()
-
-    def release_resources(self, **kwargs):
-        raise NotImplementedError()
-
-    def disconnect_usb(self, **kwargs):
-        raise NotImplementedError()
-
-    def get_metrics(self, **kwargs):
-        raise NotImplementedError()
-
-    def get_waveform(self, **kwargs):
-        raise NotImplementedError()
-
-    def teardown(self, **kwargs):
-        raise NotImplementedError()
-
-
-class PowerMonitorMonsoonFacade(BasePowerMonitor):
-
-    def __init__(self, monsoon):
-        """Constructs a PowerMonitorFacade.
-
-        Args:
-            monsoon: delegate monsoon object, either
-                antlion.controllers.monsoon_lib.api.hvpm.monsoon.Monsoon or
-                antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.Monsoon.
-        """
-        self.monsoon = monsoon
-        self._log = logging.getLogger()
-
-    def setup(self, monsoon_config=None, **__):
-        """Set up the Monsoon controller for this testclass/testcase."""
-
-        if monsoon_config is None:
-            raise MonsoonError('monsoon_config can not be None')
-
-        self._log.info('Setting up Monsoon %s' % self.monsoon.serial)
-        voltage = monsoon_config.get_numeric('voltage', 4.2)
-        self.monsoon.set_voltage_safe(voltage)
-        if 'max_current' in monsoon_config:
-            self.monsoon.set_max_current(
-                monsoon_config.get_numeric('max_current'))
-
-    def power_cycle(self, monsoon_config=None, **__):
-        """Power cycles the delegated monsoon controller."""
-
-        if monsoon_config is None:
-            raise MonsoonError('monsoon_config can not be None')
-
-        self._log.info('Setting up Monsoon %s' % self.monsoon.serial)
-        voltage = monsoon_config.get_numeric('voltage', 4.2)
-        self._log.info('Setting up Monsoon voltage %s' % voltage)
-        self.monsoon.set_voltage_safe(0)
-        if 'max_current' in monsoon_config:
-            self.monsoon.set_max_current(
-                monsoon_config.get_numeric('max_current'))
-            self.monsoon.set_max_initial_current(
-                monsoon_config.get_numeric('max_current'))
-        self.connect_usb()
-        self.monsoon.set_voltage_safe(voltage)
-
-    def connect_usb(self, **__):
-        self.monsoon.usb('on')
-
-    def measure(self, measurement_args=None, start_time=None,
-                monsoon_output_path=None, **__):
-        if measurement_args is None:
-            raise MonsoonError('measurement_args can not be None')
-
-        with tempfile.NamedTemporaryFile(prefix='monsoon_') as tmon:
-            self.monsoon.measure_power(**measurement_args,
-                                       output_path=tmon.name)
-
-            if monsoon_output_path and start_time is not None:
-                _write_raw_data_in_standard_format(
-                    power_metrics.import_raw_data(tmon.name),
-                    monsoon_output_path, start_time)
-
-    def release_resources(self, **__):
-        # nothing to do
-        pass
-
-    def disconnect_usb(self, **__):
-        self.monsoon.usb('off')
-
-    def get_waveform(self, file_path=None):
-        """Parses a file to obtain all current (in amps) samples.
-
-        Args:
-            file_path: Path to a monsoon file.
-
-        Returns:
-            A list of tuples in which the first element is a timestamp and the
-            second element is the sampled current at that time.
-        """
-        if file_path is None:
-            raise MonsoonError('file_path can not be None')
-
-        return list(power_metrics.import_raw_data(file_path))
-
-    def get_metrics(self, start_time=None, voltage=None, monsoon_file_path=None,
-                    timestamps=None, **__):
-        """Parses a monsoon_file_path to compute the consumed power and other
-        power related metrics.
-
-        Args:
-            start_time: Time when the measurement started, this is used to
-                correlate timestamps from the device and from the power samples.
-            voltage: Voltage used when the measurement started. Used to compute
-                power from current.
-            monsoon_file_path: Path to a monsoon file.
-            timestamps: Named timestamps delimiting the segments of interest.
-            **__:
-
-        Returns:
-            A list of power_metrics.Metric.
-        """
-        if start_time is None:
-            raise MonsoonError('start_time can not be None')
-        if voltage is None:
-            raise MonsoonError('voltage can not be None')
-        if monsoon_file_path is None:
-            raise MonsoonError('monsoon_file_path can not be None')
-        if timestamps is None:
-            raise MonsoonError('timestamps can not be None')
-
-        return power_metrics.generate_test_metrics(
-            power_metrics.import_raw_data(monsoon_file_path),
-            timestamps=timestamps, voltage=voltage)
-
-    def teardown(self, **__):
-        # nothing to do
-        pass
diff --git a/src/antlion/controllers/relay_device_controller.py b/src/antlion/controllers/relay_device_controller.py
deleted file mode 100644
index 158e484..0000000
--- a/src/antlion/controllers/relay_device_controller.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-
-from antlion.controllers.relay_lib.relay_rig import RelayRig
-
-MOBLY_CONTROLLER_CONFIG_NAME = "RelayDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "relay_devices"
-
-
-def create(config):
-    """Creates RelayDevice controller objects.
-
-        Args:
-            config: Either one of two types:
-
-            A filename to a RelayController config (json file)
-            A RelayController config/dict composed of:
-                boards: A list of controller boards (see tests).
-                devices: A list of RelayDevices attached to the boards.
-
-        Returns:
-                A list of RelayDevice objects.
-    """
-    if type(config) is str:
-        return _create_from_external_config_file(config)
-    elif type(config) is dict:
-        return _create_from_dict(config)
-
-
-def _create_from_external_config_file(config_filename):
-    """Creates RelayDevice controller objects from an external config file.
-
-    Args:
-        config_filename: The filename of the RelayController config.
-
-    Returns:
-        A list of RelayDevice objects.
-    """
-    with open(config_filename) as json_file:
-        return _create_from_dict(json.load(json_file))
-
-
-def _create_from_dict(config):
-    """Creates RelayDevice controller objects from a dictionary.
-
-    Args:
-        config: The dictionary containing the RelayController config.
-
-    Returns:
-        A list of RelayDevice objects.
-    """
-    devices = list()
-
-    relay_rig = RelayRig(config)
-    for device in relay_rig.devices.values():
-        devices.append(device)
-
-    return devices
-
-
-def destroy(relay_devices):
-    """Cleans up RelayDevice objects.
-
-        Args:
-            relay_devices: A list of AndroidDevice objects.
-    """
-    for device in relay_devices:
-        device.clean_up()
-
-
-def get_info(relay_devices):
-    """Get information on a list of RelayDevice objects.
-
-    Args:
-        relay_devices: A list of RelayDevice objects.
-
-    Returns:
-        A list of dict, each representing info for an RelayDevice objects.
-    """
-    device_info = []
-    for device in relay_devices:
-        relay_ids = list()
-        for relay in device.relays:
-            relay_ids.append(relay)
-        info = {"name": device.name, "relays": relay_ids}
-        device_info.append(info)
-    return device_info
diff --git a/src/antlion/controllers/relay_lib/__init__.py b/src/antlion/controllers/relay_lib/__init__.py
deleted file mode 100644
index 7f1a899..0000000
--- a/src/antlion/controllers/relay_lib/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/src/antlion/controllers/relay_lib/ak_xb10_speaker.py b/src/antlion/controllers/relay_lib/ak_xb10_speaker.py
deleted file mode 100644
index 465cf92..0000000
--- a/src/antlion/controllers/relay_lib/ak_xb10_speaker.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_ON_WAIT_TIME = 2
-POWER_OFF_WAIT_TIME = 6
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class AkXB10Speaker(BluetoothRelayDevice):
-    """A&K XB10 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_ON_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_OFF_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.PAIR, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/devices/__init__.py b/src/antlion/controllers/relay_lib/devices/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/relay_lib/devices/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py b/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py
deleted file mode 100644
index 369f72d..0000000
--- a/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.helpers import validate_key
-
-
-class BluetoothRelayDevice(GenericRelayDevice):
-    """A base class for bluetooth devices.
-
-    This base class is similar to GenericRelayDevice, but requires a mac_address
-    to be present in the supplied config. This helps with type checking when
-    relays are used with Bluetooth utilities.
-    """
-    def __init__(self, config, relay_rig):
-        GenericRelayDevice.__init__(self, config, relay_rig)
-
-        self.mac_address = validate_key('mac_address', config, str,
-                                        self.__class__.__name__)
-
-    def get_mac_address(self):
-        """Returns the mac address of this device."""
-        return self.mac_address
-
diff --git a/src/antlion/controllers/relay_lib/dongles.py b/src/antlion/controllers/relay_lib/dongles.py
deleted file mode 100644
index 234a58b..0000000
--- a/src/antlion/controllers/relay_lib/dongles.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-# Necessary timeout in between commands
-CMD_TIMEOUT = 1.2
-# Pairing mode activation wait time
-PAIRING_MODE_WAIT_TIME = 4.5
-SINGLE_ACTION_SHORT_WAIT_TIME = 0.6
-SINGLE_ACTION_LONG_WAIT_TIME = 2.0
-MISSING_RELAY_MSG = 'Relay config for Three button "%s" missing relay "%s".'
-
-
-class Buttons(enum.Enum):
-    ACTION = 'Action'
-    NEXT = 'Next'
-    PREVIOUS = 'Previous'
-
-
-class SingleButtonDongle(BluetoothRelayDevice):
-    """A Bluetooth dongle with one generic button Normally action.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relay(Buttons.ACTION.value)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'ACTION' button for PAIRING_MODE_WAIT_TIME seconds.
-        """
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=PAIRING_MODE_WAIT_TIME)
-
-    def press_play_pause(self):
-        """Briefly presses the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-
-    def press_vr_mode(self):
-        """Long press the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_LONG_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-
-class ThreeButtonDongle(BluetoothRelayDevice):
-    """A Bluetooth dongle with three generic buttons Normally action, next, and
-     previous.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'ACTION' button for PAIRING_MODE_WAIT_TIME seconds.
-        """
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=PAIRING_MODE_WAIT_TIME)
-
-    def press_play_pause(self):
-        """Briefly presses the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_vr_mode(self):
-        """Long press the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_LONG_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_next(self):
-        """Briefly presses the Next button."""
-        self.relays[Buttons.NEXT.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_previous(self):
-        """Briefly presses the Previous button."""
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
diff --git a/src/antlion/controllers/relay_lib/earstudio_receiver.py b/src/antlion/controllers/relay_lib/earstudio_receiver.py
deleted file mode 100644
index 50bf62d..0000000
--- a/src/antlion/controllers/relay_lib/earstudio_receiver.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-SHORT_PRESS_WAIT_TIME = 0.5
-MEDIUM_PRESS_WAIT_TIME = 3.0
-LONG_PRESS_WAIT_TIME = 4.5
-WAIT_FOR_EFFECT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class EarstudioReceiver(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def power_on(self):
-        """Power on the Earstudio device.
-
-        BLUE LED blinks once when power turns on and a "power-on sound" plays.
-        Automatically connects to a device that has been connected before;
-        GREEN LED blinks once every 3 seconds after the "connection sound."
-        Enters Discoverable/Pairing Mode when no device has been connected
-        before; GREEN LED blinks twice every 0.5 seconds.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def power_off(self):
-        """Power off the Earstudio device.
-
-        RED LED blinks once right before power off, and a "power-off sound"
-        plays when the device turns off.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_play_pause(self):
-        """Toggle audio play state.
-
-        GREEN LED slowly blinks once every 3 seconds during Bluetooth/USB
-        playback.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_accept_call(self):
-        """Receive incoming call.
-
-        BLUE LED slowly blinks once every 3 seconds, and a "call-receiving
-        sound" plays when the call is received.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_reject_call(self):
-        """Reject incoming call.
-
-        "Call-rejection sound" when refused.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_end_call(self):
-        """End ongoing call.
-
-        "Call-end sound" when ended.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_next(self):
-        """Skip to the next track."""
-        self.relays[Buttons.NEXT.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def toggle_ambient_mode(self):
-        """Turn ambient mode on/off.
-
-        Only available during playback.
-        To use it, you must set 'Ambient Shortcut Key' to 'on' in the EarStudio
-        app.
-        """
-        self.relays[Buttons.NEXT.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_previous(self):
-        """Rewind to beginning of current or previous track."""
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def enter_pairing_mode(self):
-        """Enter BlueTooth pairing mode.
-
-        GREEN LED blinks twice every 0.5 seconds after "enter paring-mode
-        sound." Disconnects from the current connected device when entering
-        this mode.
-        """
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_up(self, press_duration=SHORT_PRESS_WAIT_TIME):
-        """Turn up the volume.
-
-        Volume increases by 0.5 dB with each press. Pressing and holding the
-        button increases the volume continuously, up to 6 dB.
-        Args:
-          press_duration (int|float): How long to hold the button, in seconds.
-        """
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(press_duration)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_down(self, press_duration=SHORT_PRESS_WAIT_TIME):
-        """Turn down the volume.
-
-        Volume decreases by 0.5 dB with each press. Pressing and holding the
-        button decreases the volume continuously, down to -60 dB. Pressing the
-        button at the minimum volume mutes the output.
-        Args:
-          press_duration (int|float): How long to hold the button, in seconds.
-        """
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(press_duration)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
diff --git a/src/antlion/controllers/relay_lib/errors.py b/src/antlion/controllers/relay_lib/errors.py
deleted file mode 100644
index 5af5d60..0000000
--- a/src/antlion/controllers/relay_lib/errors.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import signals
-
-
-class RelayConfigError(signals.ControllerError):
-    """An error found within the RelayRig config file."""
-
-
-class RelayDeviceConnectionError(signals.ControllerError):
-    """An error for being unable to connect to the device."""
diff --git a/src/antlion/controllers/relay_lib/fugu_remote.py b/src/antlion/controllers/relay_lib/fugu_remote.py
deleted file mode 100644
index db706c0..0000000
--- a/src/antlion/controllers/relay_lib/fugu_remote.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5.2
-
-
-class Buttons(enum.Enum):
-    HOME = 'Home'
-    BACK = 'Back'
-    PLAY_PAUSE = 'Play'
-
-
-class FuguRemote(BluetoothRelayDevice):
-    """A Nexus Player (Fugu) Remote.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-        # If the Fugu remote has a power relay attached, turn it on.
-        power = 'Power'
-        if power in self.relays:
-            self.relays[power].set_nc()
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'Home' and 'Back' buttons for a little over 5 seconds.
-        """
-        with SynchronizeRelays():
-            self.hold_down(Buttons.HOME.value)
-            self.hold_down(Buttons.BACK.value)
-
-        time.sleep(PAIRING_MODE_WAIT_TIME)
-
-        with SynchronizeRelays():
-            self.release(Buttons.HOME.value)
-            self.release(Buttons.BACK.value)
-
-    def press_play_pause(self):
-        """Briefly presses the Play/Pause button."""
-        self.press(Buttons.PLAY_PAUSE.value)
-
-    def press_home(self):
-        """Briefly presses the Home button."""
-        self.press(Buttons.HOME.value)
-
-    def press_back(self):
-        """Briefly presses the Back button."""
-        self.press(Buttons.BACK.value)
diff --git a/src/antlion/controllers/relay_lib/generic_relay_device.py b/src/antlion/controllers/relay_lib/generic_relay_device.py
deleted file mode 100644
index cf93400..0000000
--- a/src/antlion/controllers/relay_lib/generic_relay_device.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.relay_device import RelayDevice
-
-MISSING_RELAY_MSG = 'Relay config for %s device "%s" missing relay "%s".'
-
-
-class GenericRelayDevice(RelayDevice):
-    """A default, all-encompassing implementation of RelayDevice.
-
-    This class allows for quick access to getting relay switches through the
-    subscript ([]) operator. Note that it does not allow for re-assignment or
-    additions to the relays dictionary.
-    """
-
-    def __init__(self, config, relay_rig):
-        RelayDevice.__init__(self, config, relay_rig)
-
-    def _ensure_config_contains_relays(self, relay_names):
-        for relay_name in relay_names:
-            self._ensure_config_contains_relay(relay_name)
-
-    def _ensure_config_contains_relay(self, relay_name):
-        """Throws an error if the relay does not exist."""
-        if relay_name not in self.relays:
-            raise RelayConfigError(MISSING_RELAY_MSG % (self.__class__.__name__,
-                                                        self.name, relay_name))
-
-    def get_button_names(self):
-        """Returns the list of all button names."""
-        return list(self.relays.keys())
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        with SynchronizeRelays():
-            for relay in self.relays.values():
-                relay.set_no()
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        with SynchronizeRelays():
-            for relay in self.relays.values():
-                if relay.is_dirty():
-                    relay.set_no()
-
-    def press(self, button_name):
-        """Presses the button for a short period of time.
-
-        Args:
-            button_name: the name of the button to press.
-        """
-        self.relays[button_name].set_nc_for()
-
-    def hold_down(self, button_name):
-        """Holds down the button until release is called.
-
-        If the button is already being held, the state does not change.
-
-        Args:
-            button_name: the name of the button to hold down.
-        """
-        self.relays[button_name].set_nc()
-
-    def release(self, button_name):
-        """Releases the held down button with name 'button_name'.
-
-        If the button is already released, the state does not change.
-
-        Args:
-            button_name: the name of the button to release.
-        """
-        self.relays[button_name].set_no()
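The concrete devices in this directory all follow the same pattern on top of GenericRelayDevice; below is a minimal sketch of that pattern with a hypothetical ExampleHeadset and its single Power button (the class and constant names are illustrative only):

import enum
import time

from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice

POWER_ON_WAIT_TIME = 2


class Buttons(enum.Enum):
    POWER = 'Power'


class ExampleHeadset(GenericRelayDevice):
    """Hypothetical device that wraps a single Power relay."""

    def __init__(self, config, relay_rig):
        GenericRelayDevice.__init__(self, config, relay_rig)
        self._ensure_config_contains_relays(button.value for button in Buttons)

    def power_on(self):
        # Hold the Power button down, then release it after the wait time.
        self.hold_down(Buttons.POWER.value)
        time.sleep(POWER_ON_WAIT_TIME)
        self.release(Buttons.POWER.value)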
diff --git a/src/antlion/controllers/relay_lib/headset.py b/src/antlion/controllers/relay_lib/headset.py
deleted file mode 100644
index 119b4f6..0000000
--- a/src/antlion/controllers/relay_lib/headset.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 9
-POWER_TOGGLE_WAIT_TIME = 2
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class Headset(BluetoothRelayDevice):
-    """Headset with same Power and Pair Button.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def turn_power_on_and_enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
diff --git a/src/antlion/controllers/relay_lib/helpers.py b/src/antlion/controllers/relay_lib/helpers.py
deleted file mode 100644
index 463946c..0000000
--- a/src/antlion/controllers/relay_lib/helpers.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from six import string_types
-
-MISSING_KEY_ERR_MSG = 'key "%s" missing from %s. Offending object:\n %s'
-TYPE_MISMATCH_ERR_MSG = 'Key "%s" is of type %s. Expecting %s.' \
-                        ' Offending object:\n %s'
-
-
-def validate_key(key, dictionary, expected_type, source):
-    """Validates if a key exists and its value is the correct type.
-    Args:
-        key: The key in dictionary.
-        dictionary: The dictionary that should contain key.
-        expected_type: the type that key's value should have.
-        source: The name of the object being checked. Used for error messages.
-
-    Returns:
-        The value of dictionary[key] if no error was raised.
-
-    Raises:
-        RelayConfigError if the key does not exist, or is not of expected_type.
-    """
-    if key not in dictionary:
-        raise RelayConfigError(MISSING_KEY_ERR_MSG % (key, source, dictionary))
-    if expected_type == str:
-        if not isinstance(dictionary[key], string_types):
-            raise RelayConfigError(TYPE_MISMATCH_ERR_MSG %
-                                   (key, type(dictionary[key]), expected_type,
-                                    dictionary))
-    elif not isinstance(dictionary[key], expected_type):
-        raise RelayConfigError(TYPE_MISMATCH_ERR_MSG %
-                               (key, type(dictionary[key]), expected_type,
-                                dictionary))
-    return dictionary[key]
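For context, a minimal sketch of how validate_key was typically consumed by the relay classes in this directory; the config values are illustrative, echoing the example config that appears later in this change:

from antlion.controllers.relay_lib.errors import RelayConfigError
from antlion.controllers.relay_lib.helpers import validate_key

config = {'name': 'aukey', 'mac_address': 'e9:08:ef:2b:47:a1'}

# Returns the value when the key exists and has the expected type.
mac_address = validate_key('mac_address', config, str, 'SingleButtonDongle')

# Raises RelayConfigError when the key is missing or of the wrong type.
try:
    validate_key('relays', config, dict, 'SingleButtonDongle')
except RelayConfigError as error:
    print(error)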
diff --git a/src/antlion/controllers/relay_lib/i6s_headset.py b/src/antlion/controllers/relay_lib/i6s_headset.py
deleted file mode 100644
index 7de5eba..0000000
--- a/src/antlion/controllers/relay_lib/i6s_headset.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 3
-WAIT_TIME = 0.1
-MISSING_RELAY_MSG = 'Relay config for i6s Headset "%s" missing relay "%s".'
-
-
-class Buttons(enum.Enum):
-    POWER = "Power"
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    PAIR = "Pair"
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class I6sHeadset(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Turns off headset."""
-        self.relays[Buttons.PAIR.value].set_no_for(PAIRING_MODE_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        """Sets relay in paring mode."""
-        self.relays[Buttons.PAIR.value].set_no_for(PAIRING_MODE_WAIT_TIME)
-
-    def power_on(self):
-        """Power on relay."""
-        self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-
-    def press_play_pause(self):
-        """
-        Sets relay to
-            Play state : if there is no A2DP_streaming.
-            Pause state : if there is A2DP_streaming.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
-
-    def press_next(self):
-        """Skips to next song from relay_device."""
-        self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
-
-    def press_previous(self):
-        """Skips to previous song from relay_device."""
-        self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
-
-    def press_volume_up(self):
-        """Increases volume from relay_device."""
-        self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
-
-    def press_volume_down(self):
-        """Decreases volume from relay_device."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
-
-    def press_initiate_call(self):
-        """Initiate call from relay device."""
-        for _ in range(2):
-            self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-        return True
-
-    def press_accept_call(self):
-        """Accepts call from relay device."""
-        self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-        return True
diff --git a/src/antlion/controllers/relay_lib/jaybird_x3.py b/src/antlion/controllers/relay_lib/jaybird_x3.py
deleted file mode 100644
index 991267a..0000000
--- a/src/antlion/controllers/relay_lib/jaybird_x3.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-SHORT_PRESS_WAIT_TIME = 0.5
-MED_PRESS_WAIT_TIME = 1.5
-POWER_ON_WAIT_TIME = 2.5
-LONG_PRESS_WAIT_TIME = 4.5
-
-WAIT_FOR_EFFECT_TIME = 2.5
-
-
-class Buttons(enum.Enum):
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-    POWER = "Power"
-
-
-class JaybirdX3Earbuds(BluetoothRelayDevice):
-    """Jaybird X3 earbuds model
-
-    A relay device class for Jaybird X3 earbuds that provides basic Bluetooth
-    controls.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def power_off(self):
-        """If the device powers off, the LED will flash red before it
-        powers off. A voice prompt will say "POWER_OFF".
-        """
-        self.relays[Buttons.POWER.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def power_on(self):
-        """If the device powers on, the LED will flash green.
-        A voice prompt will say "POWER ON".
-        """
-        self.relays[Buttons.POWER.value].set_nc_for(POWER_ON_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def enter_pairing_mode(self):
-        """The Jaybird can only enter pairing mode from an OFF state.
-        """
-        self.power_on()
-        self.power_off()
-        self.relays[Buttons.POWER.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_play_pause(self):
-        """Toggles the audio play state."""
-        self.relays[Buttons.POWER.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def activate_voice_commands(self):
-        """Activates voice commands during music streaming."""
-        self.relays[Buttons.POWER.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_accept_call(self):
-        """Receives an incoming call."""
-        self.relays[Buttons.POWER.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_reject_call(self):
-        """Rejects an incoming call."""
-        self.relays[Buttons.POWER.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_next(self):
-        """Skips to the next track."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_previous(self):
-        """Rewinds to beginning of current or previous track."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_up(self):
-        """Turns up the volume."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_down(self):
-        """Turns down the volume."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def toggle_hands_free(self):
-        """Switches call audio between the phone and X3 buds."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def mute_phone_call(self):
-        """Mutes phone call audio."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
diff --git a/src/antlion/controllers/relay_lib/logitech_headset.py b/src/antlion/controllers/relay_lib/logitech_headset.py
deleted file mode 100644
index 5c95bac..0000000
--- a/src/antlion/controllers/relay_lib/logitech_headset.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Device Details:
-https://www.logitech.com/en-in/product/bluetooth-audio-adapter#specification-tabular
-"""
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-WAIT_TIME = 0.1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class LogitechAudioReceiver(BluetoothRelayDevice):
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def power_on(self):
-        """Power on relay."""
-        self.relays[Buttons.POWER.value].set_nc()
-
-    def enter_pairing_mode(self):
-        """Sets relay in paring mode."""
-        self.relays[Buttons.PAIR.value].set_nc()
diff --git a/src/antlion/controllers/relay_lib/power_supply.py b/src/antlion/controllers/relay_lib/power_supply.py
deleted file mode 100644
index f1c6213..0000000
--- a/src/antlion/controllers/relay_lib/power_supply.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-POWER_TOGGLE_WAIT_TIME = 0.5
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class PowerSupply(BluetoothRelayDevice):
-    """Power Supply for Headset.
-
-    Wraps the button presses.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def setup(self):
-        """Do nothing, since this is not a headset"""
-        return True
-
-    def clean_up(self):
-        """Do nothing, since this is not a headset"""
-        return True
diff --git a/src/antlion/controllers/relay_lib/rdl_relay_board.py b/src/antlion/controllers/relay_lib/rdl_relay_board.py
deleted file mode 100644
index e4d5c37..0000000
--- a/src/antlion/controllers/relay_lib/rdl_relay_board.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.usb_relay_board_base import UsbRelayBoardBase
-from pylibftdi import BitBangDevice
-
-
-class RdlRelayBoard(UsbRelayBoardBase):
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            if value == RelayState.NO:
-                bb.port |= self.address[relay_position]
-            else:
-                bb.port &= ~(self.address[relay_position])
-        self.status_dict[relay_position] = value
diff --git a/src/antlion/controllers/relay_lib/relay.py b/src/antlion/controllers/relay_lib/relay.py
deleted file mode 100644
index fbac1de..0000000
--- a/src/antlion/controllers/relay_lib/relay.py
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from time import sleep
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-
-
-class RelayState(Enum):
-    """Enum for possible Relay States."""
-    # Pretend this means 'OFF'
-    NO = 'NORMALLY_OPEN'
-    # Pretend this means 'ON'
-    NC = 'NORMALLY_CLOSED'
-
-
-class SynchronizeRelays:
-    """A class that allows for relays to change state nearly simultaneously.
-
-    Can be used with the 'with' statement in Python:
-
-    with SynchronizeRelays():
-        relay1.set_no()
-        relay2.set_nc()
-
-    Note that the thread will still wait for Relay.transition_wait_time
-    after execution leaves the 'with' statement.
-    """
-    _sync_sleep_flag = False
-
-    def __enter__(self):
-        self.prev_toggle_time = Relay.transition_wait_time
-        self.prev_sync_flag = SynchronizeRelays._sync_sleep_flag
-        Relay.transition_wait_time = 0
-        SynchronizeRelays._sync_sleep_flag = False
-
-    def __exit__(self, type, value, traceback):
-        if SynchronizeRelays._sync_sleep_flag:
-            sleep(Relay.transition_wait_time)
-
-        Relay.transition_wait_time = self.prev_toggle_time
-        SynchronizeRelays._sync_sleep_flag = self.prev_sync_flag
-
-
-class Relay(object):
-    """A class representing a single relay switch on a RelayBoard.
-
-    References to these relays are stored in both the RelayBoard and the
-    RelayDevice classes under the variable "relays". GenericRelayDevice can also
-    access these relays through the subscript ([]) operator.
-
-    At the moment, relays only have a valid state of 'ON' or 'OFF'. This may be
-    extended in a subclass if needed. Keep in mind that if this is done, changes
-    will also need to be made in the RelayRigParser class to initialize the
-    relays.
-
-    """
-    """How long to wait for relays to transition state."""
-    transition_wait_time = .2
-    button_press_time = .25
-
-    def __init__(self, relay_board, position):
-        self.relay_board = relay_board
-        self.position = position
-        self._original_state = None
-        self.relay_id = "%s/%s" % (self.relay_board.name, self.position)
-
-    def set_no(self):
-        """Sets the relay to the 'NO' state. Shorthand for set(RelayState.NO).
-
-        Blocks the thread for Relay.transition_wait_time.
-        """
-        self.set(RelayState.NO)
-
-    def set_nc(self):
-        """Sets the relay to the 'NC' state. Shorthand for set(RelayState.NC).
-
-        Blocks the thread for Relay.transition_wait_time.
-
-        """
-        self.set(RelayState.NC)
-
-    def toggle(self):
-        """Swaps the state from 'NO' to 'NC' or 'NC' to 'NO'.
-        Blocks the thread for Relay.transition_wait_time.
-        """
-        if self.get_status() == RelayState.NO:
-            self.set(RelayState.NC)
-        else:
-            self.set(RelayState.NO)
-
-    def set(self, state):
-        """Sets the relay to the 'NO' or 'NC' state.
-
-        Blocks the thread for Relay.transition_wait_time.
-
-        Args:
-            state: either 'NO' or 'NC'.
-
-        Raises:
-            ValueError if state is not 'NO' or 'NC'.
-
-        """
-        if self._original_state is None:
-            self._original_state = self.relay_board.get_relay_status(
-                self.position)
-
-        if state is not RelayState.NO and state is not RelayState.NC:
-            raise ValueError(
-                'Invalid state. Received "%s". Expected any of %s.' %
-                (state, list(RelayState)))
-        if self.get_status() != state:
-            self.relay_board.set(self.position, state)
-            SynchronizeRelays._sync_sleep_flag = True
-            sleep(Relay.transition_wait_time)
-
-    def set_no_for(self, seconds=button_press_time):
-        """Sets the relay to 'NORMALLY_OPEN' for seconds. Blocks the thread.
-
-        Args:
-            seconds: The number of seconds to sleep for.
-        """
-        self.set_no()
-        sleep(seconds)
-        self.set_nc()
-
-    def set_nc_for(self, seconds=button_press_time):
-        """Sets the relay to 'NORMALLY_CLOSED' for seconds. Blocks the thread.
-
-        Respects Relay.transition_wait_time for toggling state.
-
-        Args:
-            seconds: The number of seconds to sleep for.
-        """
-        self.set_nc()
-        sleep(seconds)
-        self.set_no()
-
-    def get_status(self):
-        return self.relay_board.get_relay_status(self.position)
-
-    def clean_up(self):
-        """Does any clean up needed to allow the next series of tests to run.
-
-        For now, all this does is switch the relay back to its previous state.
-        Inheriting from this class and overriding this method would be the best
-        way to allow a more complex clean-up to occur. If you do this, be
-        sure to make the necessary modifications in RelayRig.initialize_relay
-        and RelayRigParser.parse_json_relays.
-        """
-        if self._original_state is not None:
-            self.set(self._original_state)
-
-    def is_dirty(self):
-        return self._original_state is not None
-
-
-class RelayDict(object):
-    """A wrapped dictionary that gives config errors upon failure.
-
-    Has the same interface as a dictionary, but when getting the key fails, the
-    dictionary returns a RelayConfigError, letting the user know that the reason
-    the dict failed to return a relay is because the relay was not found in the
-    config.
-
-    Also prevents modification of elements, because changing the relays here
-    does not change what they are in hardware.
-    """
-    ERROR_MESSAGE = ('Error: Attempted to get relay "%s" in %s "%s" but the '
-                     'relay does not exist.\nExisting relays are: %s.\nMake '
-                     'sure the missing relay is added to the config file, and '
-                     'is properly setup.')
-
-    def __init__(self, relay_device, input_dict):
-        self.relay_device = relay_device
-        self._store = input_dict
-
-    def __getitem__(self, key):
-        try:
-            return self._store[key]
-        except KeyError:
-            raise RelayConfigError(self.ERROR_MESSAGE %
-                                   (key, type(self.relay_device),
-                                    self.relay_device.name, self._store))
-
-    def __iter__(self):
-        return iter(self._store)
-
-    def __len__(self):
-        return len(self._store)
-
-    def __repr__(self):
-        return repr(self._store)
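A short sketch of how the Relay and SynchronizeRelays APIs above were driven from device code; relay_a and relay_b stand in for entries of a device's relays dict and are not names from the original sources:

import time

from antlion.controllers.relay_lib.relay import SynchronizeRelays


def press_two_buttons(relay_a, relay_b, hold_seconds=1.0):
    """Closes two relays near-simultaneously, holds them, then reopens them."""
    # Inside the 'with' block the per-relay transition wait is suppressed, so
    # both relays change state back to back.
    with SynchronizeRelays():
        relay_a.set_nc()
        relay_b.set_nc()

    # Keep both "buttons" held down for the requested duration.
    time.sleep(hold_seconds)

    with SynchronizeRelays():
        relay_a.set_no()
        relay_b.set_no()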
diff --git a/src/antlion/controllers/relay_lib/relay_board.py b/src/antlion/controllers/relay_lib/relay_board.py
deleted file mode 100644
index 464326d..0000000
--- a/src/antlion/controllers/relay_lib/relay_board.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.relay import Relay
-
-
-class RelayBoard(object):
-    """Handles interfacing with the Relays and RelayDevices.
-
-    This is the base class for all RelayBoards.
-    """
-
-    def __init__(self, config):
-        """Creates a RelayBoard instance. Handles naming and relay creation.
-
-        Args:
-            config: A configuration dictionary, usually pulled from an element
-            under in "boards" list in the relay rig config file.
-        """
-        self.name = validate_key('name', config, str, 'config')
-        if '/' in self.name:
-            raise RelayConfigError('RelayBoard name cannot contain a "/".')
-        self.relays = dict()
-        for pos in self.get_relay_position_list():
-            self.relays[pos] = Relay(self, pos)
-
-    def set(self, relay_position, state):
-        """Sets the relay to the given state.
-
-        Args:
-            relay_position: the relay having its state modified.
-            state: the state to set the relay to. Currently only states NO and
-                   NC are supported.
-        """
-        raise NotImplementedError()
-
-    def get_relay_position_list(self):
-        """Returns a list of all possible relay positions."""
-        raise NotImplementedError()
-
-    def get_relay_status(self, relay):
-        """Returns the state of the given relay."""
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/relay_lib/relay_device.py b/src/antlion/controllers/relay_lib/relay_device.py
deleted file mode 100644
index 06bf42f..0000000
--- a/src/antlion/controllers/relay_lib/relay_device.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.helpers import validate_key
-
-
-class RelayDevice(object):
-    """The base class for all relay devices.
-
-    RelayDevice has access to both its relays as well as the relay rig it is
-    a part of. Note that you can reach the relay boards through
-    relays[name].relay_board. The relays are not guaranteed to be on
-    the same relay board.
-    """
-
-    def __init__(self, config, relay_rig):
-        """Creates a RelayDevice.
-
-        Args:
-            config: The dictionary found in the config file for this device.
-            You can add your own params to the config file if needed, and they
-            will be found in this dictionary.
-            relay_rig: The RelayRig the device is attached to. This won't be
-            useful for classes that inherit from RelayDevice, so just pass it
-            down to this __init__.
-        """
-        self.rig = relay_rig
-        self.relays = dict()
-
-        validate_key('name', config, str, '"devices" element')
-        self.name = config['name']
-
-        relays = validate_key('relays', config, dict, '"devices" list element')
-        if len(relays) < 1:
-            raise RelayConfigError(
-                'Key "relays" must have at least 1 element.')
-
-        for name, relay_id in relays.items():
-            self.relays[name] = relay_rig.relays[relay_id]
-
-    def setup(self):
-        """Sets up the relay device to be ready for commands."""
-
-    def clean_up(self):
-        """Sets the relay device back to its inert state."""
diff --git a/src/antlion/controllers/relay_lib/relay_rig.py b/src/antlion/controllers/relay_lib/relay_rig.py
deleted file mode 100644
index 835dd66..0000000
--- a/src/antlion/controllers/relay_lib/relay_rig.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import collections
-
-from antlion.controllers.relay_lib.ak_xb10_speaker import AkXB10Speaker
-from antlion.controllers.relay_lib.dongles import SingleButtonDongle
-from antlion.controllers.relay_lib.dongles import ThreeButtonDongle
-from antlion.controllers.relay_lib.earstudio_receiver import EarstudioReceiver
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.fugu_remote import FuguRemote
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.headset import Headset
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.i6s_headset import I6sHeadset
-from antlion.controllers.relay_lib.jaybird_x3 import JaybirdX3Earbuds
-from antlion.controllers.relay_lib.logitech_headset import LogitechAudioReceiver
-from antlion.controllers.relay_lib.power_supply import PowerSupply
-from antlion.controllers.relay_lib.rdl_relay_board import RdlRelayBoard
-from antlion.controllers.relay_lib.sain_smart_board import SainSmartBoard
-from antlion.controllers.relay_lib.sain_smart_8_channel_usb_relay_board import SainSmart8ChannelUsbRelayBoard
-from antlion.controllers.relay_lib.skullcandy import Skullcandy
-from antlion.controllers.relay_lib.sony_xb2_speaker import SonyXB2Speaker
-from antlion.controllers.relay_lib.sony_xb20_speaker import SonyXB20Speaker
-from antlion.controllers.relay_lib.tao_tronics_headset import TaoTronicsCarkit
-
-
-class RelayRig:
-    """A group of relay boards and their connected devices.
-
-    This class is also responsible for handling the creation of the relay switch
-    boards, as well as the devices and relays associated with them.
-
-    The boards dict can contain different types of relay boards. They share a
-    common interface through inheriting from RelayBoard. This layer can be
-    ignored by the user.
-
-    The relay devices are stored in a dict of (device_name: device). These
-    device references should be used by the user when they want to directly
-    interface with the relay switches. See RelayDevice or GenericRelayDevice
-    for implementation details.
-
-    """
-    DUPLICATE_ID_ERR_MSG = 'The {} "{}" is not unique. Duplicated in:\n {}'
-
-    # A dict of lambdas that instantiate a relay board upon invocation.
-    # The key is the class type name, the value is the lambda.
-    _board_constructors = {
-        'SainSmartBoard':
-        lambda x: SainSmartBoard(x),
-        'RdlRelayBoard':
-        lambda x: RdlRelayBoard(x),
-        'SainSmart8ChannelUsbRelayBoard':
-        lambda x: SainSmart8ChannelUsbRelayBoard(x),
-    }
-
-    # Similar to the dict above, except for devices.
-    _device_constructors = {
-        'GenericRelayDevice': lambda x, rig: GenericRelayDevice(x, rig),
-        'FuguRemote': lambda x, rig: FuguRemote(x, rig),
-        'I6sHeadset': lambda x, rig: I6sHeadset(x, rig),
-        'JaybirdX3Earbuds': lambda x, rig: JaybirdX3Earbuds(x, rig),
-        "LogitechAudioReceiver" :lambda x, rig: LogitechAudioReceiver(x, rig),
-        'SonyXB2Speaker': lambda x, rig: SonyXB2Speaker(x, rig),
-        'SonyXB20Speaker': lambda x, rig: SonyXB20Speaker(x, rig),
-        'TaoTronicsCarkit': lambda x, rig: TaoTronicsCarkit(x, rig),
-        'AkXB10Speaker': lambda x, rig: AkXB10Speaker(x, rig),
-        'SingleButtonDongle': lambda x, rig: SingleButtonDongle(x, rig),
-        'ThreeButtonDongle': lambda x, rig: ThreeButtonDongle(x, rig),
-        'EarstudioReceiver': lambda x, rig: EarstudioReceiver(x, rig),
-        'Headset': lambda x, rig: Headset(x, rig),
-        'Skullcandy': lambda x, rig: Skullcandy(x, rig),
-        'PowerSupply': lambda x, rig: PowerSupply(x, rig),
-    }
-
-    def __init__(self, config):
-        self.relays = dict()
-        self.boards = dict()
-        self.devices = collections.OrderedDict()
-
-        validate_key('boards', config, list, 'relay config file')
-
-        for elem in config['boards']:
-            board = self.create_relay_board(elem)
-            if board.name in self.boards:
-                raise RelayConfigError(
-                    self.DUPLICATE_ID_ERR_MSG.format('name', elem['name'],
-                                                     elem))
-            self.boards[board.name] = board
-
-        # Note: 'boards' is a necessary value, 'devices' is not.
-        if 'devices' in config:
-            for elem in config['devices']:
-                relay_device = self.create_relay_device(elem)
-                if relay_device.name in self.devices:
-                    raise RelayConfigError(
-                        self.DUPLICATE_ID_ERR_MSG.format(
-                            'name', elem['name'], elem))
-                self.devices[relay_device.name] = relay_device
-        else:
-            device_config = dict()
-            device_config['name'] = 'GenericRelayDevice'
-            device_config['relays'] = dict()
-            for relay_id in self.relays:
-                device_config['relays'][relay_id] = relay_id
-            self.devices['device'] = self.create_relay_device(device_config)
-
-    def create_relay_board(self, config):
-        """Builds a RelayBoard from the given config.
-
-        Args:
-            config: An object containing 'type', 'name', 'relays', and
-            (optionally) 'properties'. See the example json file.
-
-        Returns:
-            A RelayBoard with the given type found in the config.
-
-        Raises:
-            RelayConfigError if config['type'] doesn't exist or is not a string.
-
-        """
-        validate_key('type', config, str, '"boards" element')
-        try:
-            ret = self._board_constructors[config['type']](config)
-        except LookupError:
-            raise RelayConfigError(
-                'RelayBoard with type {} not found. Has it been added '
-                'to the _board_constructors dict?'.format(config['type']))
-        for _, relay in ret.relays.items():
-            self.relays[relay.relay_id] = relay
-        return ret
-
-    def create_relay_device(self, config):
-        """Builds a RelayDevice from the given config.
-
-        When given no 'type' key in the config, the function will default to
-        returning a GenericRelayDevice with the relays found in the 'relays'
-        array.
-
-        Args:
-            config: An object containing 'name', 'relays', and (optionally)
-            type.
-
-        Returns:
-            A RelayDevice with the given type found in the config. If no type is
-            found, it will default to GenericRelayDevice.
-
-        Raises:
-            RelayConfigError if the type given does not match any from the
-            _device_constructors dictionary.
-
-        """
-        if 'type' in config:
-            if config['type'] not in RelayRig._device_constructors:
-                raise RelayConfigError(
-                    'Device with type {} not found. Has it been added '
-                    'to the _device_constructors dict?'.format(config['type']))
-            else:
-                device = self._device_constructors[config['type']](config,
-                                                                   self)
-
-        else:
-            device = GenericRelayDevice(config, self)
-
-        return device
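For reference, an illustrative RelayRig config assembled from the 'boards'/'devices' structure described above; the board, device, and relay names mirror the SainSmart example in the next file, and actually constructing the rig assumes the physical relay board is attached:

from antlion.controllers.relay_lib.relay_rig import RelayRig

# 'boards' is required; 'devices' is optional and defaults to a single
# GenericRelayDevice that owns every relay.
config = {
    'boards': [{
        'type': 'SainSmart8ChannelUsbRelayBoard',
        'name': 'ttyUSB0',
        'device': 'A9079L5D',
    }],
    'devices': [{
        'type': 'SingleButtonDongle',
        'name': 'aukey',
        'mac_address': 'e9:08:ef:2b:47:a1',
        'relays': {'Action': 'ttyUSB0/1'},
    }],
}

rig = RelayRig(config)
rig.devices['aukey'].enter_pairing_mode()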
diff --git a/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py b/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
deleted file mode 100644
index 33d7f43..0000000
--- a/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.usb_relay_board_base import UsbRelayBoardBase
-from pylibftdi import BitBangDevice
-""" This library is to control the sainsmart board.
-
-Device:
-    https://www.sainsmart.com/products/8-channel-12v-usb-relay-module
-
-Additional setup steps (substitute pip/pip3 based on your Python version):
-1. pip install pylibftdi
-2. pip install libusb1
-3. sudo apt-get install libftdi-dev
-4. Create the file /etc/udev/rules.d/99-libftdi.rules as root and add the lines below:
-SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", GROUP="plugdev", MODE="0660"
-SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", GROUP="plugdev", MODE="0660"
-5. Connect the USB relay to the computer and power the board with the necessary connectors
-6. Verify the device is found by: python -m pylibftdi.examples.list_devices
-6a. Example output: FTDI:FT245R USB FIFO:A9079L5D
-7. The FIFO value will be your device name in the config
-8. Your config should look something like this (note the FIFO name is used here):
-
-{
-    "_description": "This is an example skeleton of a ficticious relay.",
-    "testbed": [{
-        "_description": "A testbed with one relay",
-        "name": "relay_test",
-        "RelayDevice": {
-            "boards": [{
-                "type": "SainSmart8ChannelUsbRelayBoard",
-                "name": "ttyUSB0",
-                "device": "A9079L5D"
-            }],
-            "devices": [{
-                "type": "SingleButtonDongle",
-                "name": "aukey",
-                "mac_address": "e9:08:ef:2b:47:a1",
-                "relays": {
-                    "Action": "ttyUSB0/1"
-                }
-
-            }]
-        }
-    }],
-    "logpath": "/tmp/logs",
-    "testpaths": ["../tests"]
-}
-"""
-
-
-class SainSmart8ChannelUsbRelayBoard(UsbRelayBoardBase):
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Note that this board acts in reverse of normal relays.
-        EG: NO = NC and NC = NO
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            if value == RelayState.NO:
-                bb.port &= ~(self.address[relay_position])
-            else:
-                bb.port |= self.address[relay_position]
-        self.status_dict[relay_position] = value
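For reference, the removed set() above drives the FTDI bit-bang port with one bit per relay, and the board's wiring is inverted (NO clears the bit, NC sets it). A hardware-free sketch of the same masking arithmetic; the integer 'port' stands in for pylibftdi's BitBangDevice.port, and nothing below is antlion API:

# Hardware-free illustration of the removed masking logic.
ADDRESS = {1: 0x1, 2: 0x2, 3: 0x4, 4: 0x8, 5: 0x10, 6: 0x20, 7: 0x40, 8: 0x80}

def set_relay(port, relay_position, to_normally_open):
    """Returns the new port value: NO clears the relay's bit, NC sets it."""
    if to_normally_open:
        return port & ~ADDRESS[relay_position]
    return port | ADDRESS[relay_position]

port = 0xFF                                   # every relay bit set
port = set_relay(port, 3, to_normally_open=True)
assert port == 0xFB                           # bit 0x4 cleared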
diff --git a/src/antlion/controllers/relay_lib/sain_smart_board.py b/src/antlion/controllers/relay_lib/sain_smart_board.py
deleted file mode 100644
index b5bc310..0000000
--- a/src/antlion/controllers/relay_lib/sain_smart_board.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-from urllib.request import urlopen
-
-from antlion.controllers.relay_lib.errors import RelayDeviceConnectionError
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-
-BASE_URL = 'http://192.168.1.4/30000/'
-
-
-class SainSmartBoard(RelayBoard):
-    """Controls and queries SainSmart Web Relay Board.
-
-    Controls and queries SainSmart Web Relay Board, found here:
-    http://www.sainsmart.com/sainsmart-rj45-tcp-ip-remote-controller-board-with-8-channels-relay-integrated.html
-    this uses a web interface to toggle relays.
-
-    There is an unmentioned hidden status page that can be found at <root>/99/.
-    """
-
-    # No longer used. Here for debugging purposes.
-    #
-    # Old status pages. Used before base_url/99 was found.
-    # STATUS_1 = '40'
-    # STATUS_2 = '43'
-    #
-    # This is the regex used to parse the old status pages:
-    # r'y-\d(?P<relay>\d).+?> (?:&nbsp)?(?P<status>.*?)&'
-    #
-    # Pages that will turn all switches on or off, even the ghost switches.
-    # ALL_RELAY_OFF = '44'
-    # ALL_RELAY_ON = '45'
-
-    HIDDEN_STATUS_PAGE = '99'
-
-    VALID_RELAY_POSITIONS = [0, 1, 2, 3, 4, 5, 6, 7]
-    NUM_RELAYS = 8
-
-    def __init__(self, config):
-        # This will be lazy loaded
-        self.status_dict = None
-        self.base_url = validate_key('base_url', config, str, 'config')
-        if not self.base_url.endswith('/'):
-            self.base_url += '/'
-        super(SainSmartBoard, self).__init__(config)
-
-    def get_relay_position_list(self):
-        return self.VALID_RELAY_POSITIONS
-
-    def _load_page(self, relative_url):
-        """Loads a web page at self.base_url + relative_url.
-
-        Properly opens and closes the web page.
-
-        Args:
-            relative_url: The string appended to the base_url.
-
-        Returns:
-            the contents of the web page.
-
-        Raises:
-            A RelayDeviceConnectionError is raised if the page cannot be loaded.
-
-        """
-        try:
-            page = urlopen(self.base_url + relative_url)
-            result = page.read().decode('utf-8')
-            page.close()
-        except IOError:
-            raise RelayDeviceConnectionError(
-                'Unable to connect to board "{}" through {}'.format(
-                    self.name, self.base_url + relative_url))
-        return result
-
-    def _sync_status_dict(self):
-        """Returns a dictionary of relays and there current state."""
-        result = self._load_page(self.HIDDEN_STATUS_PAGE)
-        if 'TUX' not in result:
-            raise RelayDeviceConnectionError(
-                'Sainsmart board with URL %s has not completed initialization '
-                'after its IP was set, and must be power-cycled to prevent '
-                'random disconnections. After power-cycling, make sure %s/%s '
-                'has TUX appear in its output.' %
-                (self.base_url, self.base_url, self.HIDDEN_STATUS_PAGE))
-        status_string = re.search(r'">([01]*)TUX', result).group(1)
-
-        self.status_dict = {}
-        for index, char in enumerate(status_string):
-            self.status_dict[index] = (
-                RelayState.NC if char == '1' else RelayState.NO)
-
-    def _print_status(self):
-        """Prints out the list of relays and their current state."""
-        for i in range(0, 8):
-            print('Relay {}: {}'.format(i, self.status_dict[i]))
-
-    def get_relay_status(self, relay_position):
-        """Returns the current status of the passed in relay."""
-        if self.status_dict is None:
-            self._sync_status_dict()
-        return self.status_dict[relay_position]
-
-    def set(self, relay_position, value):
-        """Sets the given relay to be either ON or OFF, indicated by value."""
-        if self.status_dict is None:
-            self._sync_status_dict()
-        self._load_page(self._get_relay_url_code(relay_position, value))
-        self.status_dict[relay_position] = value
-
-    @staticmethod
-    def _get_relay_url_code(relay_position, no_or_nc):
-        """Returns the two digit code corresponding to setting the relay."""
-        if no_or_nc == RelayState.NC:
-            on_modifier = 1
-        else:
-            on_modifier = 0
-        return '{:02d}'.format(relay_position * 2 + on_modifier)
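As a quick reference for the web-relay encoding removed above: each relay maps to a two-digit page code, relay_position * 2, plus one when closing the relay (NC), and the hidden <root>/99 page embeds one 0/1 digit per relay before a 'TUX' marker. A standalone sketch of both pieces; the sample HTML string is fabricated for illustration:

import re

def relay_url_code(relay_position, closed):
    """Two-digit page code: position * 2, plus 1 to close (NC) or 0 to open (NO)."""
    return '{:02d}'.format(relay_position * 2 + (1 if closed else 0))

assert relay_url_code(0, closed=True) == '01'
assert relay_url_code(7, closed=False) == '14'

sample_page = '<a href="x">01000010TUX</a>'   # fabricated status-page output
bits = re.search(r'">([01]*)TUX', sample_page).group(1)
states = {i: ('NC' if char == '1' else 'NO') for i, char in enumerate(bits)}
print(states)                                 # relays 1 and 6 read as closed here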
diff --git a/src/antlion/controllers/relay_lib/skullcandy.py b/src/antlion/controllers/relay_lib/skullcandy.py
deleted file mode 100644
index 078bbfd..0000000
--- a/src/antlion/controllers/relay_lib/skullcandy.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_TOGGLE_WAIT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class Skullcandy(BluetoothRelayDevice):
-    """Skullcandy Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def turn_power_on_and_enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/sony_xb20_speaker.py b/src/antlion/controllers/relay_lib/sony_xb20_speaker.py
deleted file mode 100644
index 942a812..0000000
--- a/src/antlion/controllers/relay_lib/sony_xb20_speaker.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 6
-POWER_TOGGLE_WAIT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class SonyXB20Speaker(BluetoothRelayDevice):
-    """Sony XB20 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/sony_xb2_speaker.py b/src/antlion/controllers/relay_lib/sony_xb2_speaker.py
deleted file mode 100644
index 9c97c35..0000000
--- a/src/antlion/controllers/relay_lib/sony_xb2_speaker.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_ON_WAIT_TIME = 2
-POWER_OFF_WAIT_TIME = 6
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class SonyXB2Speaker(BluetoothRelayDevice):
-    """Sony XB2 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_ON_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_OFF_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.PAIR, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/tao_tronics_headset.py b/src/antlion/controllers/relay_lib/tao_tronics_headset.py
deleted file mode 100644
index 88bb61f..0000000
--- a/src/antlion/controllers/relay_lib/tao_tronics_headset.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-WAIT_TIME = 0.05
-
-
-class Buttons(enum.Enum):
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class TaoTronicsCarkit(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def press_play_pause(self):
-        """
-        Sets relay to
-            Play state : if there is no A2DP_streaming.
-            Pause state : if there is A2DP_streaming.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
-
-    def press_next(self):
-        """Skips to next song from relay_device."""
-        self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
-
-    def press_previous(self):
-        """Skips to previous song from relay_device."""
-        self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
-
-    def press_volume_up(self):
-        """Increases volume from relay_device."""
-        self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
-
-    def press_volume_down(self):
-        """Decreases volume from relay_device."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
-
-    def press_initiate_call(self):
-        """Initiate call from relay device."""
-        for i in range(0, 2):
-            self.press(Buttons.PLAY_PAUSE.value)
-            time.sleep(0.2)
-        return True
-
-    def press_accept_call(self):
-        """Accepts call from relay device."""
-        self.press(Buttons.PLAY_PAUSE.value)
-        return True
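The removed device wrappers above (TaoTronicsCarkit and the speaker classes) all require the config's 'relays' map to contain one entry per Buttons value, enforced by _ensure_config_contains_relays(). A hypothetical config fragment written as a Python dict for illustration; the relay IDs and MAC address are placeholders, not a real testbed:

# Hypothetical config fragment; relay IDs and MAC address are placeholders.
tao_tronics_device = {
    'type': 'TaoTronicsCarkit',
    'name': 'carkit',
    'mac_address': '00:11:22:33:44:55',
    'relays': {
        'Next': 'ttyUSB0/1',
        'Previous': 'ttyUSB0/2',
        'Play_pause': 'ttyUSB0/3',
        'Volume_up': 'ttyUSB0/4',
        'Volume_down': 'ttyUSB0/5',
    },
}
# The check passes only when every Buttons value has a matching key in 'relays'.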
diff --git a/src/antlion/controllers/relay_lib/usb_relay_board_base.py b/src/antlion/controllers/relay_lib/usb_relay_board_base.py
deleted file mode 100644
index 45422eb..0000000
--- a/src/antlion/controllers/relay_lib/usb_relay_board_base.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-from pylibftdi import BitBangDevice
-
-
-class UsbRelayBoardBase(RelayBoard):
-
-    VALID_RELAY_POSITIONS = [1, 2, 3, 4, 5, 6, 7, 8]
-    NUM_RELAYS = 8
-
-    def __init__(self, config):
-        self.status_dict = dict()
-        self.device = config["device"]
-        super(UsbRelayBoardBase, self).__init__(config)
-        self.address = {
-            1: 0x1,
-            2: 0x2,
-            3: 0x4,
-            4: 0x8,
-            5: 0x10,
-            6: 0x20,
-            7: 0x40,
-            8: 0x80,
-            "select_all": 0xFF
-        }
-
-    def get_relay_position_list(self):
-        return self.VALID_RELAY_POSITIONS
-
-    def test_bit(self, int_type, offset):
-        """Tests whether the bit at the given offset is set.
-
-        Args:
-            int_type: Port value read from the relay board.
-            offset: Bit offset for the given relay position.
-
-        Returns:
-            A non-zero value if the bit is set, 0 otherwise.
-        """
-        mask = 1 << offset
-        return int_type & mask
-
-    def _get_relay_state(self, data, relay):
-        """Function to get status for the given relay position.
-
-        Args:
-            data: Port value for given relay.
-            relay: Relay_position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        if relay == 1:
-            return self.test_bit(data, 1)
-        if relay == 2:
-            return self.test_bit(data, 3)
-        if relay == 3:
-            return self.test_bit(data, 5)
-        if relay == 4:
-            return self.test_bit(data, 7)
-        if relay == 5:
-            return self.test_bit(data, 2)
-        if relay == 6:
-            return self.test_bit(data, 4)
-        if relay == 7:
-            return self.test_bit(data, 6)
-        if relay == 8:
-            return self.test_bit(data, 8)
-
-    def get_relay_status(self, relay_position):
-        """Get relay status for the given relay position.
-
-        Args:
-            relay_position: Status for given Relay position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            self.status_dict[relay_position] = self._get_relay_state(
-                bb.port, relay_position)
-        return self.status_dict[relay_position]
-
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        raise NotImplementedError
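The removed _get_relay_state() above hard-codes an irregular relay-to-bit-offset mapping when reading the port back. A table-driven sketch of the same read path, pure Python with no pylibftdi; the mapping is copied verbatim from the removed code and the helper name is illustrative:

# Pure-Python sketch of the removed read path.
READ_OFFSET = {1: 1, 2: 3, 3: 5, 4: 7, 5: 2, 6: 4, 7: 6, 8: 8}

def relay_state(port, relay_position):
    """Non-zero when the relay's read-back bit is set in the port value."""
    return port & (1 << READ_OFFSET[relay_position])

port = 0b00101010                             # example read-back value
print({pos: bool(relay_state(port, pos)) for pos in range(1, 9)})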
diff --git a/src/antlion/controllers/rohdeschwarz_lib/OWNERS b/src/antlion/controllers/rohdeschwarz_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/rohdeschwarz_lib/__init__.py b/src/antlion/controllers/rohdeschwarz_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmw500.py b/src/antlion/controllers/rohdeschwarz_lib/cmw500.py
deleted file mode 100644
index 978600d..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmw500.py
+++ /dev/null
@@ -1,1167 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from enum import Enum
-
-from antlion.controllers import abstract_inst
-
-LTE_ATTACH_RESP = 'ATT'
-LTE_CONN_RESP = 'CONN'
-LTE_IDLE_RESP = 'IDLE'
-LTE_PSWITCHED_ON_RESP = 'ON'
-LTE_PSWITCHED_OFF_RESP = 'OFF'
-
-STATE_CHANGE_TIMEOUT = 20
-
-
-class LteState(Enum):
-    """LTE ON and OFF"""
-    LTE_ON = 'ON'
-    LTE_OFF = 'OFF'
-
-
-class BtsNumber(Enum):
-    """Base station Identifiers."""
-    BTS1 = 'PCC'
-    BTS2 = 'SCC1'
-    BTS3 = 'SCC2'
-    BTS4 = 'SCC3'
-    BTS5 = 'SCC4'
-    BTS6 = 'SCC6'
-    BTS7 = 'SCC7'
-
-
-class LteBandwidth(Enum):
-    """Supported LTE bandwidths."""
-    BANDWIDTH_1MHz = 'B014'
-    BANDWIDTH_3MHz = 'B030'
-    BANDWIDTH_5MHz = 'B050'
-    BANDWIDTH_10MHz = 'B100'
-    BANDWIDTH_15MHz = 'B150'
-    BANDWIDTH_20MHz = 'B200'
-
-
-class DuplexMode(Enum):
-    """Duplex Modes"""
-    FDD = 'FDD'
-    TDD = 'TDD'
-
-
-class SchedulingMode(Enum):
-    """Supported scheduling modes."""
-    RMC = 'RMC'
-    USERDEFINEDCH = 'UDCHannels'
-
-
-class TransmissionModes(Enum):
-    """Supported transmission modes."""
-    TM1 = 'TM1'
-    TM2 = 'TM2'
-    TM3 = 'TM3'
-    TM4 = 'TM4'
-    TM7 = 'TM7'
-    TM8 = 'TM8'
-    TM9 = 'TM9'
-
-
-class UseCarrierSpecific(Enum):
-    """Enable or disable carrier specific."""
-    UCS_ON = 'ON'
-    UCS_OFF = 'OFF'
-
-
-class RbPosition(Enum):
-    """Supported RB positions."""
-    LOW = 'LOW'
-    HIGH = 'HIGH'
-    P5 = 'P5'
-    P10 = 'P10'
-    P23 = 'P23'
-    P35 = 'P35'
-    P48 = 'P48'
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    QPSK = 'QPSK'
-    Q16 = 'Q16'
-    Q64 = 'Q64'
-    Q256 = 'Q256'
-
-
-class DciFormat(Enum):
-    """Support DCI Formats for MIMOs"""
-    D1 = 'D1'
-    D1A = 'D1A'
-    D1B = 'D1B'
-    D2 = 'D2'
-    D2A = 'D2A'
-    D2B = 'D2B'
-    D2C = 'D2C'
-
-
-class MimoModes(Enum):
-    """MIMO Modes dl antennas"""
-    MIMO1x1 = 'ONE'
-    MIMO2x2 = 'TWO'
-    MIMO4x4 = 'FOUR'
-
-
-class MimoScenario(Enum):
-    """Supported mimo scenarios"""
-    SCEN1x1 = 'SCELl:FLEXible SUA1,RF1C,RX1,RF1C,TX1'
-    SCEN2x2 = 'TRO:FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2'
-    SCEN4x4 = 'FRO FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2,RF2C,TX3,RF4C,TX4'
-
-
-class RrcState(Enum):
-    """States to enable/disable rrc."""
-    RRC_ON = 'ON'
-    RRC_OFF = 'OFF'
-
-
-class MacPadding(Enum):
-    """Enables/Disables Mac Padding."""
-    ON = 'ON'
-    OFF = 'OFF'
-
-
-class ConnectionType(Enum):
-    """Supported Connection Types."""
-    TEST = 'TESTmode'
-    DAU = 'DAPPlication'
-
-
-class RepetitionMode(Enum):
-    """Specifies LTE Measurement Repetition Mode."""
-    SINGLESHOT = 'SINGleshot'
-    CONTINUOUS = 'CONTinuous'
-
-
-class TpcPowerControl(Enum):
-    """Specifies Up Link power control types."""
-    MIN_POWER = 'MINPower'
-    MAX_POWER = 'MAXPower'
-    CONSTANT = 'CONStant'
-    SINGLE = 'SINGle'
-    UDSINGLE = 'UDSingle'
-    UDCONTINUOUS = 'UDContinuous'
-    ALTERNATE = 'ALT0'
-    CLOSED_LOOP = 'CLOop'
-    RP_CONTROL = 'RPControl'
-    FLEX_POWER = 'FULPower'
-
-
-class ReducedPdcch(Enum):
-    """Enables/disables the reduction of PDCCH resources."""
-    ON = 'ON'
-    OFF = 'OFF'
-
-
-class Cmw500(abstract_inst.SocketInstrument):
-
-    def __init__(self, ip_addr, port):
-        """Init method to setup variables for controllers.
-
-        Args:
-              ip_addr: Controller's ip address.
-              port: Port
-        """
-        super(Cmw500, self).__init__(ip_addr, port)
-        self._connect_socket()
-        self._send('*CLS')
-        self._send('*ESE 0;*SRE 0')
-        self._send('*CLS')
-        self._send('*ESE 1;*SRE 4')
-        self._send('SYST:DISP:UPD ON')
-
-    def switch_lte_signalling(self, state):
-        """ Turns LTE signalling ON/OFF.
-
-        Args:
-              state: an instance of LteState indicating the state to which LTE
-                signal has to be set.
-        """
-        if not isinstance(state, LteState):
-            raise ValueError('state should be the instance of LteState.')
-
-        state = state.value
-
-        cmd = 'SOURce:LTE:SIGN:CELL:STATe {}'.format(state)
-        self.send_and_recv(cmd)
-
-        time_elapsed = 0
-        while time_elapsed < STATE_CHANGE_TIMEOUT:
-            response = self.send_and_recv('SOURce:LTE:SIGN:CELL:STATe:ALL?')
-
-            if response == state + ',ADJ':
-                self._logger.info('LTE signalling is now {}.'.format(state))
-                break
-
-            # Wait for a second and increase time count by one
-            time.sleep(1)
-            time_elapsed += 1
-        else:
-            raise CmwError('Failed to turn {} LTE signalling.'.format(state))
-
-    def enable_packet_switching(self):
-        """Enable packet switching in call box."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion CONNect')
-        self.wait_for_pswitched_state()
-
-    def disable_packet_switching(self):
-        """Disable packet switching in call box."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DISConnect')
-        self.wait_for_pswitched_state()
-
-    @property
-    def use_carrier_specific(self):
-        """Gets current status of carrier specific duplex configuration."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:DMODe:UCSPECific?')
-
-    @use_carrier_specific.setter
-    def use_carrier_specific(self, state):
-        """Sets the carrier specific duplex configuration.
-
-        Args:
-            state: ON/OFF UCS configuration.
-        """
-        cmd = 'CONFigure:LTE:SIGN:DMODe:UCSPECific {}'.format(state)
-        self.send_and_recv(cmd)
-
-    def send_and_recv(self, cmd):
-        """Send and recv the status of the command.
-
-        Args:
-            cmd: Command to send.
-
-        Returns:
-            status: returns the status of the command sent.
-        """
-
-        self._send(cmd)
-        if '?' in cmd:
-            status = self._recv()
-            return status
-
-    def configure_mimo_settings(self, mimo):
-        """Sets the mimo scenario for the test.
-
-        Args:
-            mimo: mimo scenario to set.
-        """
-        cmd = 'ROUTe:LTE:SIGN:SCENario:{}'.format(mimo.value)
-        self.send_and_recv(cmd)
-
-    def wait_for_pswitched_state(self, timeout=10):
-        """Wait until pswitched state.
-
-        Args:
-            timeout: timeout for lte pswitched state.
-
-        Raises:
-            CmwError on timeout.
-        """
-        while timeout > 0:
-            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
-            if state == LTE_PSWITCHED_ON_RESP:
-                self._logger.debug('Connection setup initiated.')
-                break
-            elif state == LTE_PSWITCHED_OFF_RESP:
-                self._logger.debug('Connection detached.')
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Failure in setting up/detaching connection')
-
-    def wait_for_attached_state(self, timeout=120):
-        """Attach the controller with device.
-
-        Args:
-            timeout: timeout for phone to get attached.
-
-        Raises:
-            CmwError on time out.
-        """
-        while timeout > 0:
-            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
-
-            if state == LTE_ATTACH_RESP:
-                self._logger.debug('Call box attached with device')
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Device could not be attached')
-
-    def wait_for_rrc_state(self, state, timeout=120):
-        """ Waits until a certain RRC state is set.
-
-        Args:
-            state: the RRC state that is being waited for.
-            timeout: timeout for phone to be in connected state.
-
-        Raises:
-            CmwError on time out.
-        """
-        if state not in [LTE_CONN_RESP, LTE_IDLE_RESP]:
-            raise ValueError(
-                'The allowed values for state are {} and {}.'.format(
-                    LTE_CONN_RESP, LTE_IDLE_RESP))
-
-        while timeout > 0:
-            new_state = self.send_and_recv('SENSe:LTE:SIGN:RRCState?')
-
-            if new_state == state:
-                self._logger.debug('The RRC state is {}.'.format(new_state))
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Timeout before RRC state was {}.'.format(state))
-
-    def reset(self):
-        """System level reset"""
-        self.send_and_recv('*RST; *OPC')
-
-    @property
-    def get_instrument_id(self):
-        """Gets instrument identification number"""
-        return self.send_and_recv('*IDN?')
-
-    def disconnect(self):
-        """Disconnect controller from device and switch to local mode."""
-        self.switch_lte_signalling(LteState.LTE_OFF)
-        self.close_remote_mode()
-        self._close_socket()
-
-    def close_remote_mode(self):
-        """Exits remote mode to local mode."""
-        self.send_and_recv('&GTL')
-
-    def detach(self):
-        """Detach callbox and controller."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DETach')
-
-    @property
-    def rrc_connection(self):
-        """Gets the RRC connection state."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:KRRC?')
-
-    @rrc_connection.setter
-    def rrc_connection(self, state):
-        """Selects whether the RRC connection is kept or released after attach.
-
-        Args:
-            state: an instance of RrcState (RRC_ON/RRC_OFF).
-        """
-        if not isinstance(state, RrcState):
-            raise ValueError('state should be the instance of RrcState.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:KRRC {}'.format(state.value)
-        self.send_and_recv(cmd)
-
-    @property
-    def rrc_connection_timer(self):
-        """Gets the inactivity timeout for disabled rrc connection."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:RITimer?')
-
-    @rrc_connection_timer.setter
-    def rrc_connection_timer(self, time_in_secs):
-        """Sets the inactivity timeout for disabled rrc connection. By default
-        the timeout is set to 5.
-
-        Args:
-            time_in_secs: timeout of inactivity in rrc connection.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:RITimer {}'.format(time_in_secs)
-        self.send_and_recv(cmd)
-
-    @property
-    def dl_mac_padding(self):
-        """Gets the state of mac padding."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:DLPadding?')
-
-    @dl_mac_padding.setter
-    def dl_mac_padding(self, state):
-        """Enables/Disables downlink padding at the mac layer.
-
-        Args:
-            state: ON/OFF
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:DLPadding {}'.format(state.value)
-        self.send_and_recv(cmd)
-
-    @property
-    def connection_type(self):
-        """Gets the connection type applied in callbox."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:CTYPe?')
-
-    @connection_type.setter
-    def connection_type(self, ctype):
-        """Sets the connection type to be applied.
-
-        Args:
-            ctype: Connection type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:CTYPe {}'.format(ctype.value)
-        self.send_and_recv(cmd)
-
-    def get_base_station(self, bts_num=BtsNumber.BTS1):
-        """Gets the base station object based on bts num. By default
-        bts_num set to PCC
-
-        Args:
-            bts_num: base station identifier
-
-        Returns:
-            base station object.
-        """
-        return BaseStation(self, bts_num)
-
-    def init_lte_measurement(self):
-        """Gets the class object for lte measurement which can be used to
-        initiate measurements.
-
-        Returns:
-            lte measurement object.
-        """
-        return LteMeasurement(self)
-
-
-class BaseStation(object):
-    """Class to interact with different base stations"""
-
-    def __init__(self, cmw, bts_num):
-        if not isinstance(bts_num, BtsNumber):
-            raise ValueError('bts_num should be an instance of BtsNumber.')
-        self._bts = bts_num.value
-        self._cmw = cmw
-
-    @property
-    def duplex_mode(self):
-        """Gets current duplex of cell."""
-        cmd = 'CONFigure:LTE:SIGN:{}:DMODe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """Sets the Duplex mode of cell.
-
-        Args:
-            mode: String indicating FDD or TDD.
-        """
-        if not isinstance(mode, DuplexMode):
-            raise ValueError('mode should be an instance of DuplexMode.')
-
-        cmd = 'CONFigure:LTE:SIGN:{}:DMODe {}'.format(self._bts, mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def band(self):
-        """Gets the current band of cell."""
-        cmd = 'CONFigure:LTE:SIGN:{}:BAND?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @band.setter
-    def band(self, band):
-        """Sets the Band of cell.
-
-        Args:
-            band: band of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:{}:BAND {}'.format(self._bts, band)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {}'.format(
-            self._bts, channel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def ul_channel(self):
-        """Gets the uplink channel of cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @ul_channel.setter
-    def ul_channel(self, channel):
-        """Sets the up link channel number of cell.
-
-        Args:
-            channel: up link channel number of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {}'.format(
-            self._bts, channel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def bandwidth(self):
-        """Get the channel bandwidth of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell.
-        """
-        if not isinstance(bandwidth, LteBandwidth):
-            raise ValueError('bandwidth should be an instance of '
-                             'LteBandwidth.')
-        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL {}'.format(
-            self._bts, bandwidth.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def ul_frequency(self):
-        """Get the uplink frequency of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL? MHZ'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @ul_frequency.setter
-    def ul_frequency(self, freq):
-        """Get the uplink frequency of the cell.
-
-        Args:
-            freq: uplink frequency of the cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {} MHZ'.format(
-            self._bts, freq)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_frequency(self):
-        """Get the downlink frequency of the cell"""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL? MHZ'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_frequency.setter
-    def dl_frequency(self, freq):
-        """Get the downlink frequency of the cell.
-
-        Args:
-            freq: downlink frequency of the cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {} MHZ'.format(
-            self._bts, freq)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def transmode(self):
-        """Gets the TM of cell."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """Sets the TM of cell.
-
-        Args:
-            tm_mode: TM of cell.
-        """
-        if not isinstance(tm_mode, TransmissionModes):
-            raise ValueError('tm_mode should be an instance of '
-                             'Transmission modes.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission {}'.format(
-            self._bts, tm_mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def downlink_power_level(self):
-        """Gets RSPRE level."""
-        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @downlink_power_level.setter
-    def downlink_power_level(self, pwlevel):
-        """Modifies RSPRE level.
-
-        Args:
-            pwlevel: power level in dBm.
-        """
-        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel {}'.format(
-            self._bts, pwlevel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def uplink_power_control(self):
-        """Gets open loop nominal power directly."""
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @uplink_power_control.setter
-    def uplink_power_control(self, ul_power):
-        """Sets open loop nominal power directly.
-
-        Args:
-            ul_power: uplink power level.
-        """
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower {}'.format(
-            self._bts, ul_power)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """Gets uldl configuration of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, uldl):
-        """Sets the ul-dl configuration.
-
-        Args:
-            uldl: Configuration value ranging from 0 to 6.
-        """
-        if uldl not in range(0, 7):
-            raise ValueError('uldl configuration value should be between'
-                             ' 0 and 6 inclusive.')
-
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL {}'.format(self._bts, uldl)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """Gets special subframe of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, sframe):
-        """Sets the tdd special subframe of the cell.
-
-        Args:
-            sframe: Integer value ranging from 0 to 9.
-        """
-        if sframe not in range(0, 10):
-            raise ValueError('tdd special subframe should be between 0 and 9'
-                             ' inclusive.')
-
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe {}'.format(
-            self._bts, sframe)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def scheduling_mode(self):
-        """Gets the current scheduling mode."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @scheduling_mode.setter
-    def scheduling_mode(self, mode):
-        """Sets the scheduling type for the cell.
-
-        Args:
-            mode: Selects the channel mode to be scheduled.
-        """
-        if not isinstance(mode, SchedulingMode):
-            raise ValueError('mode should be an instance of SchedulingMode.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe {}'.format(
-            self._bts, mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_configuration_dl(self):
-        """Gets rmc's rb configuration for down link. This function returns
-        Number of Resource blocks, Resource block position and Modulation type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:DL?'.format(
-            self._bts, self.scheduling_mode)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_configuration_dl.setter
-    def rb_configuration_dl(self, rb_config):
-        """Sets the rb configuration for down link for scheduling type.
-
-        Args:
-            rb_config: Tuple containing Number of resource blocks, resource
-            block position and modulation type.
-
-        Raises:
-            ValueError: If tuple unpacking fails.
-        """
-        if self.scheduling_mode == 'RMC':
-            rb, rb_pos, modulation = rb_config
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:DL {},{},'
-                   '{}'.format(self._bts, rb, rb_pos, modulation))
-            self._cmw.send_and_recv(cmd)
-
-        elif self.scheduling_mode == 'UDCH':
-            rb, start_rb, modulation, tbs = rb_config
-
-            self.validate_rb(rb)
-
-            if not isinstance(modulation, ModulationType):
-                raise ValueError('Modulation should be of type '
-                                 'ModulationType.')
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:DL {},{},'
-                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
-                                  tbs))
-            self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_configuration_ul(self):
-        """Gets rb configuration for up link. This function returns
-        Number of Resource blocks, Resource block position and Modulation type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:UL?'.format(
-            self._bts, self.scheduling_mode)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_configuration_ul.setter
-    def rb_configuration_ul(self, rb_config):
-        """Sets the rb configuration for down link for scheduling mode.
-
-        Args:
-            rb_config: Tuple containing Number of resource blocks, resource
-            block position and modulation type.
-
-        Raises:
-            ValueError: If tuple unpacking fails.
-        """
-        if self.scheduling_mode == 'RMC':
-            rb, rb_pos, modulation = rb_config
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:UL {},{},'
-                   '{}'.format(self._bts, rb, rb_pos, modulation))
-            self._cmw.send_and_recv(cmd)
-
-        elif self.scheduling_mode == 'UDCH':
-            rb, start_rb, modulation, tbs = rb_config
-
-            self.validate_rb(rb)
-
-            if not isinstance(modulation, ModulationType):
-                raise ValueError('Modulation should be of type '
-                                 'ModulationType.')
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:UL {},{},'
-                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
-                                  tbs))
-            self._cmw.send_and_recv(cmd)
-
-    def validate_rb(self, rb):
-        """Validates if rb is within the limits for bandwidth set.
-
-        Args:
-            rb: No. of resource blocks.
-
-        Raises:
-            ValueError if rb out of range.
-        """
-        bandwidth = self.bandwidth
-
-        if bandwidth == LteBandwidth.BANDWIDTH_1MHz.value:
-            if not 0 <= rb <= 6:
-                raise ValueError('RB should be between 0 and 6 inclusive'
-                                 ' for 1.4 MHz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_3MHz.value:
-            if not 0 <= rb <= 10:
-                raise ValueError('RB should be between 0 and 10 inclusive'
-                                 ' for 3 MHz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_5MHz.value:
-            if not 0 <= rb <= 25:
-                raise ValueError('RB should be between 0 and 25 inclusive'
-                                 ' for 5 MHz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_10MHz.value:
-            if not 0 <= rb <= 50:
-                raise ValueError('RB should be between 0 and 50 inclusive'
-                                 ' for 10 MHz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_15MHz.value:
-            if not 0 <= rb <= 75:
-                raise ValueError('RB should be between 0 and 75 inclusive'
-                                 ' for 15 MHz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_20MHz.value:
-            if not 0 <= rb <= 100:
-                raise ValueError('RB should be between 0 and 100 inclusive'
-                                 ' for 20 MHz.')
-
-    @property
-    def rb_position_dl(self):
-        """Gets the position of the allocated down link resource blocks within
-        the channel band-width.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_position_dl.setter
-    def rb_position_dl(self, rbpos):
-        """Selects the position of the allocated down link resource blocks
-        within the channel band-width
-
-        Args:
-            rbpos: position of resource blocks.
-        """
-        if not isinstance(rbpos, RbPosition):
-            raise ValueError('rbpos should be the instance of RbPosition.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL {}'.format(
-            self._bts, rbpos.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_position_ul(self):
-        """Gets the position of the allocated up link resource blocks within
-        the channel band-width.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_position_ul.setter
-    def rb_position_ul(self, rbpos):
-        """Selects the position of the allocated up link resource blocks
-        within the channel band-width.
-
-        Args:
-            rbpos: position of resource blocks.
-        """
-        if not isinstance(rbpos, RbPosition):
-            raise ValueError('rbpos should be the instance of RbPosition.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL {}'.format(
-            self._bts, rbpos.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dci_format(self):
-        """Gets the downlink control information (DCI) format."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dci_format.setter
-    def dci_format(self, dci_format):
-        """Selects the downlink control information (DCI) format.
-
-        Args:
-            dci_format: supported dci.
-        """
-        if not isinstance(dci_format, DciFormat):
-            raise ValueError('dci_format should be the instance of DciFormat.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat {}'.format(
-            self._bts, dci_format.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_antenna(self):
-        """Gets dl antenna count of cell."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """Sets the dl antenna count of cell.
-
-        Args:
-            num_antenna: Count of number of dl antennas to use.
-        """
-        if not isinstance(num_antenna, MimoModes):
-            raise ValueError('num_antenna should be an instance of MimoModes.')
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas {}'.format(
-            self._bts, num_antenna.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def reduced_pdcch(self):
-        """Gets the reduction of PDCCH resources state."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @reduced_pdcch.setter
-    def reduced_pdcch(self, state):
-        """Sets the reduction of PDCCH resources state.
-
-        Args:
-            state: ON/OFF.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch {}'.format(
-            self._bts, state.value)
-        self._cmw.send_and_recv(cmd)
-
-    def tpc_power_control(self, set_type):
-        """Set and execute the Up Link Power Control via TPC.
-
-        Args:
-            set_type: Type of tpc power control.
-        """
-
-        if not isinstance(set_type, TpcPowerControl):
-            raise ValueError('set_type should be an instance of '
-                             'TpcPowerControl.')
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:SET {}'.format(
-            self._bts, set_type.value)
-        self._cmw.send_and_recv(cmd)
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:PEXecute'.format(self._bts)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def tpc_closed_loop_target_power(self):
-        """Gets the target powers for power control with the TPC setup."""
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @tpc_closed_loop_target_power.setter
-    def tpc_closed_loop_target_power(self, cltpower):
-        """Sets the target powers for power control with the TPC setup.
-
-        Args:
-            cltpower: Target power.
-        """
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower {}'.format(
-            self._bts, cltpower)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        raise NotImplementedError()
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: DRX Connected mode
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        raise NotImplementedError()
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            timer: Length of interval to wait for user data to be transmitted
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        raise NotImplementedError()
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            timer: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        raise NotImplementedError()
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        raise NotImplementedError()
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            long_cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        raise NotImplementedError()
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        raise NotImplementedError()
-
-
-class LteMeasurement(object):
-
-    def __init__(self, cmw):
-        self._cmw = cmw
-
-    def initialize_measurement(self):
-        """Initialize measurement modules."""
-        self._cmw.send_and_recv('INIT:LTE:MEAS:MEValuation')
-
-    @property
-    def measurement_repetition(self):
-        """Returns the measurement repetition mode that has been set."""
-        return self._cmw.send_and_recv(
-            'CONFigure:LTE:MEAS:MEValuation:REPetition?')
-
-    @measurement_repetition.setter
-    def measurement_repetition(self, mode):
-        """Sets the mode for measuring power levels.
-
-        Args:
-            mode: Single shot/continuous.
-        """
-        if not isinstance(mode, RepetitionMode):
-            raise ValueError('mode must be an instance of RepetitionMode')
-
-        cmd = 'CONFigure:LTE:MEAS:MEValuation:REPetition {}'.format(mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def query_measurement_state(self):
-        """Returns the states and sub states of measurement."""
-        return self._cmw.send_and_recv('FETCh:LTE:MEAS:MEValuation:STATe:ALL?')
-
-    @property
-    def measure_tx_power(self):
-        """Return the current Tx power measurement."""
-        return self._cmw.send_and_recv(
-            'FETCh:LTE:MEAS:MEValuation:PMONitor:AVERage?')
-
-    def stop_measurement(self):
-        """Stops the on-going measurement.
-        This function call does not free up the resources allocated for the
-        measurement. Instead, it moves the measurement from the RUN state to
-        the RDY state.
-        """
-        self._cmw.send_and_recv('STOP:LTE:MEAS:MEValuation')
-
-    def abort_measurement(self):
-        """Aborts the measurement abruptly.
-        This function call frees up the resources allocated for the
-        measurement and discards all results.
-        """
-        self._cmw.send_and_recv('ABORt:LTE:MEAS:MEValuation')
-
-
-class CmwError(Exception):
-    """Class to raise exceptions related to cmw."""
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py b/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py
deleted file mode 100644
index a65042d..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py
+++ /dev/null
@@ -1,579 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-
-from antlion.controllers.rohdeschwarz_lib import cmw500
-from antlion.controllers import cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-
-CMW_TM_MAPPING = {
-    LteSimulation.TransmissionMode.TM1: cmw500.TransmissionModes.TM1,
-    LteSimulation.TransmissionMode.TM2: cmw500.TransmissionModes.TM2,
-    LteSimulation.TransmissionMode.TM3: cmw500.TransmissionModes.TM3,
-    LteSimulation.TransmissionMode.TM4: cmw500.TransmissionModes.TM4,
-    LteSimulation.TransmissionMode.TM7: cmw500.TransmissionModes.TM7,
-    LteSimulation.TransmissionMode.TM8: cmw500.TransmissionModes.TM8,
-    LteSimulation.TransmissionMode.TM9: cmw500.TransmissionModes.TM9
-}
-
-CMW_SCH_MAPPING = {
-    LteSimulation.SchedulingMode.STATIC: cmw500.SchedulingMode.USERDEFINEDCH
-}
-
-CMW_MIMO_MAPPING = {
-    LteSimulation.MimoMode.MIMO_1x1: cmw500.MimoModes.MIMO1x1,
-    LteSimulation.MimoMode.MIMO_2x2: cmw500.MimoModes.MIMO2x2,
-    LteSimulation.MimoMode.MIMO_4x4: cmw500.MimoModes.MIMO4x4
-}
-
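-# The maps below translate an MCS index into the TBS index (TBSI) that the
-# CMW500 expects when configuring user-defined channels; see
-# CMW500CellularSimulator.set_scheduling_mode.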
-# MCS to TBS index map with 256-QAM disabled (downlink)
-get_mcs_tbsi_map_dl = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 1,
-        2: 2,
-        3: 3,
-        4: 4,
-        5: 5,
-        6: 6,
-        7: 7,
-        8: 8,
-        9: 9
-    },
-    cmw500.ModulationType.Q16: {
-        10: 9,
-        11: 10,
-        12: 11,
-        13: 12,
-        14: 13,
-        15: 14,
-        16: 15
-    },
-    cmw500.ModulationType.Q64: {
-        17: 15,
-        18: 16,
-        19: 17,
-        20: 18,
-        21: 19,
-        22: 20,
-        23: 21,
-        24: 22,
-        25: 23,
-        26: 24,
-        27: 25,
-        28: 26
-    }
-}
-
-# MCS to TBS index map with 256-QAM enabled (downlink)
-get_mcs_tbsi_map_for_256qam_dl = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 2,
-        2: 4,
-        3: 6,
-        4: 8,
-    },
-    cmw500.ModulationType.Q16: {
-        5: 10,
-        6: 11,
-        7: 12,
-        8: 13,
-        9: 14,
-        10: 15
-    },
-    cmw500.ModulationType.Q64: {
-        11: 16,
-        12: 17,
-        13: 18,
-        14: 19,
-        15: 20,
-        16: 21,
-        17: 22,
-        18: 23,
-        19: 24
-    },
-    cmw500.ModulationType.Q256: {
-        20: 25,
-        21: 27,
-        22: 28,
-        23: 29,
-        24: 30,
-        25: 31,
-        26: 32,
-        27: 33
-    }
-}
-
-# MCS to TBS index map (uplink)
-get_mcs_tbsi_map_ul = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 1,
-        2: 2,
-        3: 3,
-        4: 4,
-        5: 5,
-        6: 6,
-        7: 7,
-        8: 8,
-        9: 9
-    },
-    cmw500.ModulationType.Q16: {
-        10: 10,
-        11: 10,
-        12: 11,
-        13: 12,
-        14: 13,
-        15: 14,
-        16: 15,
-        17: 16,
-        18: 17,
-        19: 18,
-        20: 19,
-        21: 19,
-        22: 20,
-        23: 21,
-        24: 22,
-        25: 23,
-        26: 24,
-        27: 25,
-        28: 26
-    }
-}
-
-
-class CMW500CellularSimulator(cc.AbstractCellularSimulator):
-    """ A cellular simulator for telephony simulations based on the CMW 500
-    controller. """
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 1
-
-    def __init__(self, ip_address, port):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the CMW500
-            port: the port number for the CMW500 controller
-        """
-        super().__init__()
-
-        try:
-            self.cmw = cmw500.Cmw500(ip_address, port)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('Could not connect to CMW500.')
-
-        self.bts = None
-        self.dl_modulation = None
-        self.ul_modulation = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.cmw.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        self.cmw.connection_type = cmw500.ConnectionType.DAU
-        self.bts = [self.cmw.get_base_station()]
-        self.cmw.switch_lte_signalling(cmw500.LteState.LTE_ON)
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        if enabled:
-            self.cmw.rrc_connection = cmw500.RrcState.RRC_OFF
-            self.cmw.rrc_connection_timer = time
-        else:
-            self.cmw.rrc_connection = cmw500.RrcState.RRC_ON
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        bts = self.bts[bts_index]
-        bts.duplex_mode = self.get_duplex_mode(band)
-        band = 'OB' + band
-        bts.band = band
-        self.log.debug('Band set to {}'.format(band))
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-
-        Returns:
-            a DuplexMode value indicating whether the band is FDD or TDD
-        """
-        if 33 <= int(band) <= 46:
-            return cmw500.DuplexMode.TDD
-        else:
-            return cmw500.DuplexMode.FDD
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        bts = self.bts[bts_index]
-        if input_power > 23:
-            self.log.warning('Open loop supports -50 dBm to 23 dBm. '
-                             'Setting it to the max power of 23 dBm.')
-            input_power = 23
-        bts.uplink_power_control = input_power
-        bts.tpc_power_control = cmw500.TpcPowerControl.CLOSED_LOOP
-        bts.tpc_closed_loop_target_power = input_power
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        bts = self.bts[bts_index]
-        bts.downlink_power_level = output_power
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number
-        """
-        self.bts[bts_index].uldl_configuration = tdd_config
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_1MHz
-        else:
-            msg = 'Bandwidth {} MHz is not valid for LTE'.format(bandwidth)
-            raise ValueError(msg)
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        bts = self.bts[bts_index]
-        bts.dl_channel = channel_number
-        self.log.debug('Downlink Channel set to {}'.format(bts.dl_channel))
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        bts = self.bts[bts_index]
-        mimo_mode = CMW_MIMO_MAPPING[mimo_mode]
-        if mimo_mode == cmw500.MimoModes.MIMO1x1:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN1x1)
-            bts.dl_antenna = cmw500.MimoModes.MIMO1x1
-
-        elif mimo_mode == cmw500.MimoModes.MIMO2x2:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN2x2)
-            bts.dl_antenna = cmw500.MimoModes.MIMO2x2
-
-        elif mimo_mode == cmw500.MimoModes.MIMO4x4:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN4x4)
-            bts.dl_antenna = cmw500.MimoModes.MIMO4x4
-        else:
-            raise RuntimeError('The requested MIMO mode is not supported.')
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tmode: the new transmission mode
-        """
-        bts = self.bts[bts_index]
-
-        tmode = CMW_TM_MAPPING[tmode]
-
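-        # TM1 and TM7 require a single-antenna (1x1) configuration, any
-        # supported TM is accepted with 2x2, and TM2/TM3/TM4/TM9 are also
-        # allowed with 4x4.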
-        if (tmode in [
-                cmw500.TransmissionModes.TM1, cmw500.TransmissionModes.TM7
-        ] and bts.dl_antenna == cmw500.MimoModes.MIMO1x1.value):
-            bts.transmode = tmode
-        elif (tmode.value in cmw500.TransmissionModes.__members__
-              and bts.dl_antenna == cmw500.MimoModes.MIMO2x2.value):
-            bts.transmode = tmode
-        elif (tmode in [
-                cmw500.TransmissionModes.TM2, cmw500.TransmissionModes.TM3,
-                cmw500.TransmissionModes.TM4, cmw500.TransmissionModes.TM9
-        ] and bts.dl_antenna == cmw500.MimoModes.MIMO4x4.value):
-            bts.transmode = tmode
-
-        else:
-            raise ValueError('The requested transmission mode is not '
-                             'supported with the current MIMO mode.')
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        bts = self.bts[bts_index]
-        bts.reduced_pdcch = cmw500.ReducedPdcch.ON
-
-        scheduling = CMW_SCH_MAPPING[scheduling]
-        bts.scheduling_mode = scheduling
-
-        if not (self.ul_modulation and self.dl_modulation):
-            raise ValueError('Modulation should be set prior to scheduling '
-                             'call')
-
-        if scheduling == cmw500.SchedulingMode.RMC:
-
-            if not (nrb_ul and nrb_dl):
-                raise ValueError('nrb_ul and nrb_dl must not be None')
-
-            bts.rb_configuration_ul = (nrb_ul, self.ul_modulation, 'KEEP')
-            self.log.info('ul rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-            time.sleep(1)
-
-            self.log.debug('Setting rb configurations for down link')
-            bts.rb_configuration_dl = (nrb_dl, self.dl_modulation, 'KEEP')
-            self.log.info('dl rb configurations set to {}'.format(
-                bts.rb_configuration_dl))
-
-        elif scheduling == cmw500.SchedulingMode.USERDEFINEDCH:
-
-            if not all([nrb_ul, nrb_dl, mcs_dl, mcs_ul]):
-                raise ValueError('All parameters are mandatory.')
-
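-            # Translate the requested MCS indices into the TBS indices the
-            # CMW500 expects, using the modulation tables selected via
-            # set_dl_256_qam_enabled / set_ul_64_qam_enabled.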
-            tbs = get_mcs_tbsi_map_ul[self.ul_modulation][mcs_ul]
-
-            bts.rb_configuration_ul = (nrb_ul, 0, self.ul_modulation, tbs)
-            self.log.info('ul rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-            time.sleep(1)
-
-            if self.dl_256_qam_enabled:
-                tbs = get_mcs_tbsi_map_for_256qam_dl[
-                    self.dl_modulation][mcs_dl]
-            else:
-                tbs = get_mcs_tbsi_map_dl[self.dl_modulation][mcs_dl]
-
-            bts.rb_configuration_dl = (nrb_dl, 0, self.dl_modulation, tbs)
-            self.log.info('dl rb configurations set to {}'.format(
-                bts.rb_configuration_dl))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        self.log.info('Set 256 QAM DL MCS enabled: ' + str(enabled))
-        self.dl_modulation = cmw500.ModulationType.Q256 if enabled \
-            else cmw500.ModulationType.Q64
-        self.dl_256_qam_enabled = enabled
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.log.info('Set 64 QAM UL MCS enabled: ' + str(enabled))
-        self.ul_modulation = cmw500.ModulationType.Q64 if enabled \
-            else cmw500.ModulationType.Q16
-        self.ul_64_qam_enabled = enabled
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        # TODO (b/143918664): CMW500 doesn't have an equivalent setting.
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        # TODO (b/143497738): implement.
-        self.log.error('Setting CFI is not yet implemented in the CMW500 '
-                       'controller.')
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.log.error('Setting the paging cycle duration is not yet '
-                       'implemented in the CMW500 controller.')
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.log.error('Configuring the PHICH resource setting is not yet '
-                       'implemented in the CMW500 controller.')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_attached_state(timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'attached state before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_rrc_state(cmw500.LTE_CONN_RESP, timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_rrc_state(cmw500.LTE_IDLE_RESP, timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Idle state before '
-                                            'the timeout period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        self.cmw.detach()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        raise NotImplementedError()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmx500.py b/src/antlion/controllers/rohdeschwarz_lib/cmx500.py
deleted file mode 100644
index ebdc9f9..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmx500.py
+++ /dev/null
@@ -1,1067 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import sys
-
-from enum import Enum
-from os import path
-from antlion.controllers import abstract_inst
-
-DEFAULT_XLAPI_PATH = '/home/mobileharness/Rohde-Schwarz/XLAPI/latest/venv/lib/python3.7/site-packages'
-DEFAULT_LTE_STATE_CHANGE_TIMER = 10
-DEFAULT_CELL_SWITCH_ON_TIMER = 60
-DEFAULT_ENDC_TIMER = 300
-
-logger = logging.getLogger('Xlapi_cmx500')
-
-LTE_CELL_PROPERTIES = [
-    'band',
-    'bandwidth',
-    'dl_earfcn',
-    'ul_earfcn',
-    'total_dl_power',
-    'p_b',
-    'dl_epre',
-    'ref_signal_power',
-    'm',
-    'beamforming_antenna_ports',
-    'p0_nominal_pusch',
-]
-
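-# (upper bound in MHz, number of RBs): used by LteBaseStation._to_rb_bandwidth
-# to map a requested bandwidth in MHz to a standard LTE RB count; anything at
-# or above 17.5 MHz maps to 100 RBs (20 MHz).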
-LTE_MHZ_UPPER_BOUND_TO_RB = [
-    (1.5, 6),
-    (4.0, 15),
-    (7.5, 25),
-    (12.5, 50),
-    (17.5, 75),
-]
-
-class DciFormat(Enum):
-    """Support DCI Formats for MIMOs."""
-    DCI_FORMAT_0 = 1
-    DCI_FORMAT_1 = 2
-    DCI_FORMAT_1A = 3
-    DCI_FORMAT_1B = 4
-    DCI_FORMAT_1C = 5
-    DCI_FORMAT_2 = 6
-    DCI_FORMAT_2A = 7
-    DCI_FORMAT_2B = 8
-    DCI_FORMAT_2C = 9
-    DCI_FORMAT_2D = 10
-
-
-class DuplexMode(Enum):
-    """Duplex Modes."""
-    FDD = 'FDD'
-    TDD = 'TDD'
-    DL_ONLY = 'DL_ONLY'
-
-
-class LteBandwidth(Enum):
-    """Supported LTE bandwidths."""
-    BANDWIDTH_1MHz = 6 # MHZ_1 is RB_6
-    BANDWIDTH_3MHz = 15 # MHZ_3 is RB_15
-    BANDWIDTH_5MHz = 25 # MHZ_5 is RB_25
-    BANDWIDTH_10MHz = 50 # MHZ_10 is RB_50
-    BANDWIDTH_15MHz = 75 # MHZ_15 is RB_75
-    BANDWIDTH_20MHz = 100 # MHZ_20 is RB_100
-
-
-class LteState(Enum):
-    """LTE ON and OFF."""
-    LTE_ON = 'ON'
-    LTE_OFF = 'OFF'
-
-
-class MimoModes(Enum):
-    """MIMO Modes dl antennas."""
-    MIMO1x1 = 1
-    MIMO2x2 = 2
-    MIMO4x4 = 4
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = 0
-    Q64 = 1
-    Q256 = 2
-
-
-class NasState(Enum):
-    """NAS state between callbox and dut."""
-    DEREGISTERED = 'OFF'
-    EMM_REGISTERED = 'EMM'
-    MM5G_REGISTERED = 'NR'
-
-
-class RrcState(Enum):
-    """States to enable/disable rrc."""
-    RRC_ON = 'ON'
-    RRC_OFF = 'OFF'
-
-
-class RrcConnectionState(Enum):
-    """RRC Connection states, describes possible DUT RRC connection states."""
-    IDLE = 1
-    IDLE_PAGING = 2
-    IDLE_CONNECTION_ESTABLISHMENT = 3
-    CONNECTED = 4
-    CONNECTED_CONNECTION_REESTABLISHMENT = 5
-    CONNECTED_SCG_FAILURE = 6
-    CONNECTED_HANDOVER = 7
-    CONNECTED_CONNECTION_RELEASE = 8
-
-
-class SchedulingMode(Enum):
-    """Supported scheduling modes."""
-    USERDEFINEDCH = 'UDCHannels'
-
-
-class TransmissionModes(Enum):
-    """Supported transmission modes."""
-    TM1 = 1
-    TM2 = 2
-    TM3 = 3
-    TM4 = 4
-    TM7 = 7
-    TM8 = 8
-    TM9 = 9
-
-
-# For MIMO 1x1, also set num_crs_antenna_ports to 1
-MIMO_MAX_LAYER_MAPPING = {
-    MimoModes.MIMO1x1: 2,
-    MimoModes.MIMO2x2: 2,
-    MimoModes.MIMO4x4: 4,
-}
-
-
-class Cmx500(abstract_inst.SocketInstrument):
-
-    def __init__(self, ip_addr, port, xlapi_path=DEFAULT_XLAPI_PATH):
-        """Init method to setup variables for the controller.
-
-        Args:
-              ip_addr: Controller's ip address.
-              port: Port.
-              xlapi_path: Path to the XLAPI Python package.
-        """
-
-        # Keeps the socket connection for debugging purposes for now.
-        super().__init__(ip_addr, port)
-        if xlapi_path not in sys.path:
-            sys.path.insert(0, xlapi_path)
-        self._initial_xlapi()
-        self._settings.system.set_instrument_address(ip_addr)
-        logger.info('The instrument address is {}'.format(
-                self._settings.system.get_instrument_address()))
-
-        self.bts = []
-
-        # Stops all active cells if there are any
-        self.disconnect()
-
-        # loads cell default settings from parameter file if there is one
-        default_setup_path = 'default_cell_setup.rsxp'
-        if path.exists(default_setup_path):
-            self._settings.session.set_test_param_files(default_setup_path)
-
-        self.dut = self._network.get_dut()
-        self.lte_cell = self._network.create_lte_cell('ltecell0')
-        self.nr_cell = self._network.create_nr_cell('nrcell0')
-        self._config_antenna_ports()
-        self.lte_rrc_state_change_timer = DEFAULT_LTE_STATE_CHANGE_TIMER
-        self.rrc_state_change_time_enable = False
-        self.cell_switch_on_timer = DEFAULT_CELL_SWITCH_ON_TIMER
-
-    # Configures antenna ports for the special RF connection with CMW500 + CMX500.
-    def _config_antenna_ports(self):
-        from rs_mrt.testenvironment.signaling.sri.rat.common import CsiRsAntennaPorts
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import CrsAntennaPorts
-
-        max_csi_rs_ports = CsiRsAntennaPorts.NUMBER_CSI_RS_ANTENNA_PORTS_FOUR
-        max_crs_ports = CrsAntennaPorts.NUMBER_CRS_ANTENNA_PORTS_FOUR
-
-        lte_cell_max_config = self.lte_cell.stub.GetMaximumConfiguration()
-        lte_cell_max_config.csi_rs_antenna_ports = max_csi_rs_ports
-        lte_cell_max_config.crs_antenna_ports = max_crs_ports
-        self.lte_cell.stub.SetMaximumConfiguration(lte_cell_max_config)
-
-        nr_cell_max_config = self.nr_cell.stub.GetMaximumConfiguration()
-        nr_cell_max_config.csi_rs_antenna_ports = max_csi_rs_ports
-        self.nr_cell.stub.SetMaximumConfiguration(nr_cell_max_config)
-
-    def _initial_xlapi(self):
-        import xlapi
-        import mrtype
-        from xlapi import network
-        from xlapi import settings
-
-        self._xlapi = xlapi
-        self._network = network
-        self._settings = settings
-
-    def configure_mimo_settings(self, mimo, bts_index=0):
-        """Sets the mimo scenario for the test.
-
-        Args:
-            mimo: mimo scenario to set.
-        """
-        self.bts[bts_index].set_mimo_mode(mimo)
-
-    @property
-    def connection_type(self):
-        """Gets the connection type applied in callbox."""
-        state = self.dut.state.rrc_connection_state
-        return RrcConnectionState(state.value)
-
-    def create_base_station(self, cell):
-        """Creates the base station object with cell and current object.
-
-        Args:
-            cell: the XLAPI cell.
-
-        Returns:
-            base station object.
-        Raises:
-            CmxError if the cell is neither LTE nor NR.
-        """
-        from xlapi.lte_cell import LteCell
-        from xlapi.nr_cell import NrCell
-        if isinstance(cell, LteCell):
-            return LteBaseStation(self, cell)
-        elif isinstance(cell, NrCell):
-            return NrBaseStation(self, cell)
-        else:
-            raise CmxError('The cell type is neither LTE nor NR')
-
-    def detach(self):
-        """Detach callbox and controller."""
-        for bts in self.bts:
-            bts.stop()
-
-    def disable_packet_switching(self):
-        """Disable packet switching in call box."""
-        raise NotImplementedError()
-
-    def disconnect(self):
-        """Disconnect controller from device and switch to local mode."""
-
-        # Stops all lte and nr_cell
-        for cell in self._network.get_all_lte_cells():
-            if cell.is_on():
-                cell.stop()
-
-        for cell in self._network.get_all_nr_cells():
-            if cell.is_on():
-                cell.stop()
-        self.bts.clear()
-        self._network.reset()
-
-    def enable_packet_switching(self):
-        """Enable packet switching in call box."""
-        raise NotImplementedError()
-
-    def get_base_station(self, bts_index=0):
-        """Gets the base station object based on bts num. By default
-        bts_index set to 0 (PCC).
-
-        Args:
-            bts_num: base station identifier
-
-        Returns:
-            base station object.
-        """
-        return self.bts[bts_index]
-
-    def get_network(self):
-        """ Gets the network object from cmx500 object."""
-        return self._network
-
-    def init_lte_measurement(self):
-        """Gets the class object for lte measurement which can be used to
-        initiate measurements.
-
-        Returns:
-            lte measurement object.
-        """
-        raise NotImplementedError()
-
-    def reset(self):
-        """System level reset."""
-
-        self.disconnect()
-
-    @property
-    def rrc_connection(self):
-        """Gets the RRC connection state."""
-        return self.dut.state.rrc.is_connected
-
-    def set_timer(self, timeout):
-        """Sets timer for the Cmx500 class."""
-        self.rrc_state_change_time_enable = True
-        self.lte_rrc_state_change_timer = timeout
-
-    def switch_lte_signalling(self, state):
-        """ Turns LTE signalling ON/OFF.
-
-        Args:
-            state: an instance of LteState indicating the state to which LTE
-                   signal has to be set.
-        """
-        if not isinstance(state, LteState):
-            raise ValueError('state should be the instance of LteState.')
-
-        if self.bts:
-            self.disconnect()
-        self.bts.append(LteBaseStation(self, self.lte_cell))
-        # Switch on the primary LTE cell if the state is ON; otherwise switch
-        # off all LTE cells.
-        if state.value == 'ON':
-            self.bts[0].start()
-            cell_status = self.bts[0].wait_cell_on(self.cell_switch_on_timer)
-            if cell_status:
-                logger.info('The LTE pcell status is on')
-            else:
-                raise CmxError('The LTE pcell cannot be switched on')
-        else:
-            for bts in self.bts:
-                if isinstance(bts, LteBaseStation):
-                    bts.stop()
-                logger.info(
-                    'The LTE cell status is {} after stop'.format(bts.is_on()))
-
-    def switch_on_nsa_signalling(self):
-        """Switches on LTE and NR cells for NSA (EN-DC) signalling."""
-        if self.bts:
-            self.disconnect()
-        logger.info('Switches on NSA signalling')
-        self.bts.append(LteBaseStation(self, self.lte_cell))
-        self.bts.append(NrBaseStation(self, self.nr_cell))
-        self.bts[0].start()
-        lte_cell_status = self.bts[0].wait_cell_on(self.cell_switch_on_timer)
-        if lte_cell_status:
-            logger.info('The LTE pcell status is on')
-        else:
-            raise CmxError('The LTE pcell cannot be switched on')
-
-        self.bts[1].start()
-        nr_cell_status = self.bts[1].wait_cell_on(self.cell_switch_on_timer)
-        if nr_cell_status:
-            logger.info('The NR cell status is on')
-        else:
-            raise CmxError('The NR cell cannot be switched on')
-
-    def update_lte_cell_config(self, config):
-        """Updates lte cell settings with config."""
-        set_counts = 0
-        for prop in LTE_CELL_PROPERTIES:
-            if prop in config:
-                setter_name = 'set_' + prop
-                setter = getattr(self.lte_cell, setter_name)
-                setter(config[prop])
-                set_counts += 1
-        if set_counts < len(config):
-            logger.warning('Not all configs were set in update_cell_config')
-
-    @property
-    def use_carrier_specific(self):
-        """Gets current status of carrier specific duplex configuration."""
-        raise NotImplementedError()
-
-    @use_carrier_specific.setter
-    def use_carrier_specific(self, state):
-        """Sets the carrier specific duplex configuration.
-
-        Args:
-            state: ON/OFF UCS configuration.
-        """
-        raise NotImplementedError()
-
-    def wait_for_rrc_state(self, state, timeout=120):
-        """ Waits until a certain RRC state is set.
-
-        Args:
-            state: the RRC state that is being waited for.
-            timeout: timeout for phone to be in connected state.
-
-        Raises:
-            CmxError on time out.
-        """
-        is_idle = (state.value == 'OFF')
-        for idx in range(timeout):
-            time.sleep(1)
-            if self.dut.state.rrc.is_idle == is_idle:
-                logger.info('{} reached at {} s'.format(state.value, idx))
-                return True
-        error_message = 'Waiting for {} state timed out after {} s'.format(
-                state.value, timeout)
-        logger.error(error_message)
-        raise CmxError(error_message)
-
-    def wait_until_attached(self, timeout=120):
-        """Waits until Lte attached.
-
-        Args:
-            timeout: timeout for phone to get attached.
-
-        Raises:
-            CmxError on time out.
-        """
-        try:
-            self.dut.signaling.wait_for_lte_attach(self.lte_cell, timeout)
-        except Exception:
-            raise CmxError(
-                    'wait_until_attached timed out after {} s'.format(timeout))
-
-
-class BaseStation(object):
-    """Class to interact with different the base stations."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the base station.
-        """
-
-        self._cell = cell
-        self._cmx = cmx
-        self._cc = cmx.dut.cc(cell)
-        self._network = cmx.get_network()
-
-    @property
-    def band(self):
-        """Gets the current band of cell.
-
-        Return:
-            the band number in int.
-        """
-        cell_band = self._cell.get_band()
-        return int(cell_band)
-
-    @property
-    def dl_power(self):
-        """Gets RSPRE level.
-
-        Return:
-            the power level in dbm.
-        """
-        return self._cell.get_total_dl_power().in_dBm()
-
-    @property
-    def duplex_mode(self):
-        """Gets current duplex of cell."""
-        band = self._cell.get_band()
-        if band.is_fdd():
-            return DuplexMode.FDD
-        if band.is_tdd():
-            return DuplexMode.TDD
-        if band.is_dl_only():
-            return DuplexMode.DL_ONLY
-
-    def is_on(self):
-        """Verifies if the cell is turned on.
-
-        Return:
-            boolean (if the cell is on).
-        """
-        return self._cell.is_on()
-
-    def set_band(self, band):
-        """Sets the Band of cell.
-
-        Args:
-            band: band of cell.
-        """
-        self._cell.set_band(band)
-        logger.info('The band is set to {} and is {} after setting'.format(
-                band, self.band))
-
-    def set_dl_mac_padding(self, state):
-        """Enables/Disables downlink padding at the mac layer.
-
-        Args:
-            state: a boolean
-        """
-        self._cc.set_dl_mac_padding(state)
-
-    def set_dl_power(self, pwlevel):
-        """Modifies RSPRE level.
-
-        Args:
-            pwlevel: power level in dBm.
-        """
-        self._cell.set_total_dl_power(pwlevel)
-
-    def set_ul_power(self, ul_power):
-        """Sets ul power
-
-        Args:
-            ul_power: the uplink power in dbm
-        """
-        self._cc.set_target_ul_power(ul_power)
-
-    def start(self):
-        """Starts the cell."""
-        self._cell.start()
-
-    def stop(self):
-        """Stops the cell."""
-        self._cell.stop()
-
-    def wait_cell_on(self, timeout):
-        """Waits the cell on.
-
-        Args:
-            timeout: the time for waiting the cell on.
-
-        Raises:
-            CmxError on time out.
-        """
-        waiting_time = 0
-        while waiting_time < timeout:
-            if self._cell.is_on():
-                return True
-            waiting_time += 1
-            time.sleep(1)
-        return self._cell.is_on()
-
-
-class LteBaseStation(BaseStation):
-    """ LTE base station."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for the LTE base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the LTE base station.
-        """
-        from xlapi.lte_cell import LteCell
-        if not isinstance(cell, LteCell):
-            raise CmxError('The cell is not an LTE cell; failed to create the '
-                           'LTE base station.')
-        super().__init__(cmx, cell)
-
-    def _config_scheduler(self, dl_mcs=None, dl_rb_alloc=None, dl_dci_ncce=None,
-        dl_dci_format=None, dl_tm=None, dl_num_layers=None, dl_mcs_table=None,
-        ul_mcs=None, ul_rb_alloc=None, ul_dci_ncce=None):
-
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import DciFormat
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import DlTransmissionMode
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import MaxLayersMIMO
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import McsTable
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import PdcchFormat
-
-        log_list = []
-        if dl_mcs:
-            log_list.append('dl_mcs: {}'.format(dl_mcs))
-        if ul_mcs:
-            log_list.append('ul_mcs: {}'.format(ul_mcs))
-        if dl_rb_alloc:
-            log_list.append('dl_rb_alloc: {}'.format(dl_rb_alloc))
-        if ul_rb_alloc:
-            log_list.append('ul_rb_alloc: {}'.format(ul_rb_alloc))
-        if dl_dci_ncce:
-            dl_dci_ncce = PdcchFormat(dl_dci_ncce)
-            log_list.append('dl_dci_ncce: {}'.format(dl_dci_ncce))
-        if ul_dci_ncce:
-            ul_dci_ncce = PdcchFormat(ul_dci_ncce)
-            log_list.append('ul_dci_ncce: {}'.format(ul_dci_ncce))
-        if dl_dci_format:
-            dl_dci_format = DciFormat(dl_dci_format)
-            log_list.append('dl_dci_format: {}'.format(dl_dci_format))
-        if dl_tm:
-            dl_tm = DlTransmissionMode(dl_tm.value)
-            log_list.append('dl_tm: {}'.format(dl_tm))
-        if dl_num_layers:
-            dl_num_layers = MaxLayersMIMO(dl_num_layers)
-            log_list.append('dl_num_layers: {}'.format(dl_num_layers))
-        if dl_mcs_table:
-            dl_mcs_table = McsTable(dl_mcs_table)
-            log_list.append('dl_mcs_table: {}'.format(dl_mcs_table))
-
-        is_on = self._cell.is_on()
-        num_crs_antenna_ports = self._cell.get_num_crs_antenna_ports()
-
-        # Set the number of CRS antenna ports to 4 while configuring the scheduler
-        if is_on:
-            self._cell.stop()
-            time.sleep(1)
-        self._cell.set_num_crs_antenna_ports(4)
-        scheduler = self._cmx.dut.get_scheduler(self._cell)
-        logger.info('configure scheduler for {}'.format(','.join(log_list)))
-        scheduler.configure_scheduler(
-                dl_mcs=dl_mcs, dl_rb_alloc=dl_rb_alloc, dl_dci_ncce=dl_dci_ncce,
-                dl_dci_format=dl_dci_format, dl_tm=dl_tm,
-                dl_num_layers=dl_num_layers, dl_mcs_table=dl_mcs_table,
-                ul_mcs=ul_mcs, ul_rb_alloc=ul_rb_alloc, ul_dci_ncce=ul_dci_ncce)
-        logger.info('Configure scheduler succeeds')
-
-        # Restore the previous number of CRS antenna ports
-        self._cell.set_num_crs_antenna_ports(num_crs_antenna_ports)
-        self._network.apply_changes()
-
-        if is_on:
-            self._cell.start()
-
-    @property
-    def bandwidth(self):
-        """Get the channel bandwidth of the cell.
-
-        Return:
-            the bandwidth as a number of resource blocks (RBs).
-        """
-        return self._cell.get_bandwidth().num_rb
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell.
-
-        Return:
-            the downlink channel (earfcn) in int.
-        """
-        return int(self._cell.get_dl_earfcn())
-
-    @property
-    def dl_frequency(self):
-        """Get the downlink frequency of the cell."""
-        from mrtype.frequency import Frequency
-        return self._cell.get_dl_earfcn().to_freq().in_units(
-                Frequency.Units.GHz)
-
-    def _to_rb_bandwidth(self, bandwidth):
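-        # e.g. a 10 MHz request is below the 12.5 MHz bound and maps to
-        # 50 RBs; bandwidths of 17.5 MHz or more fall through to 100 RBs.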
-        for idx in range(5):
-            if bandwidth < LTE_MHZ_UPPER_BOUND_TO_RB[idx][0]:
-                return LTE_MHZ_UPPER_BOUND_TO_RB[idx][1]
-        return 100
-
-    def set_bandwidth(self, bandwidth):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell in MHz.
-        """
-        self._cell.set_bandwidth(self._to_rb_bandwidth(bandwidth))
-
-    def set_cell_frequency_band(self, tdd_cfg=None, ssf_cfg=None):
-        """Sets cell frequency band with tdd and ssf config.
-
-        Args:
-            tdd_cfg: the tdd subframe assignment config in number (from 0-6).
-            ssf_cfg: the special subframe pattern config in number (from 1-9).
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import SpecialSubframePattern
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import SubFrameAssignment
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import CellFrequencyBand
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import Tdd
-        tdd_subframe = None
-        ssf_pattern = None
-        if tdd_cfg:
-            tdd_subframe = SubFrameAssignment(tdd_cfg + 1)
-        if ssf_cfg:
-            ssf_pattern = SpecialSubframePattern(ssf_cfg)
-        tdd = Tdd(tdd_config=Tdd.TddConfigSignaling(
-                subframe_assignment=tdd_subframe,
-                special_subframe_pattern=ssf_pattern))
-        self._cell.stub.SetCellFrequencyBand(CellFrequencyBand(tdd=tdd))
-        self._network.apply_changes()
-
-    def set_cfi(self, cfi):
-        """Sets number of pdcch symbols (cfi).
-
-        Args:
-            cfi: the value of NumberOfPdcchSymbols
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import NumberOfPdcchSymbols
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import PdcchRegionReq
-
-        logger.info('The cfi enum to set is {}'.format(
-                NumberOfPdcchSymbols(cfi)))
-        req = PdcchRegionReq()
-        req.num_pdcch_symbols = NumberOfPdcchSymbols(cfi)
-        self._cell.stub.SetPdcchControlRegion(req)
-
-    def set_dci_format(self, dci_format):
-        """Selects the downlink control information (DCI) format.
-
-        Args:
-            dci_format: the DCI format to use (a DciFormat member).
-        """
-        if not isinstance(dci_format, DciFormat):
-            raise CmxError('Wrong type for dci_format')
-        self._config_scheduler(dl_dci_format=dci_format.value)
-
-    def set_dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        if self.dl_channel == channel:
-            logger.info('The dl_channel was at {}'.format(self.dl_channel))
-            return
-        self._cell.set_earfcn(channel)
-        logger.info('The dl_channel was set to {}'.format(self.dl_channel))
-
-    def set_dl_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('modulation must be an instance of ModulationType')
-        self._config_scheduler(dl_mcs_table=modulation.value)
-
-    def set_mimo_mode(self, mimo):
-        """Sets mimo mode for Lte scenario.
-
-        Args:
-            mimo: the mimo mode.
-        """
-        if not isinstance(mimo, MimoModes):
-            raise CmxError("Wrong type of mimo mode")
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-        self._cell.set_num_crs_antenna_ports(mimo.value)
-        self._config_scheduler(dl_num_layers=MIMO_MAX_LAYER_MAPPING[mimo])
-        if is_on:
-            self._cell.start()
-
-    def set_scheduling_mode(
-        self, mcs_dl=None, mcs_ul=None, nrb_dl=None, nrb_ul=None):
-        """Sets scheduling mode.
-
-        Args:
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        self._config_scheduler(dl_mcs=mcs_dl, ul_mcs=mcs_ul, dl_rb_alloc=nrb_dl,
-                ul_rb_alloc=nrb_ul)
-
-    def set_ssf_config(self, ssf_config):
-        """Sets ssf subframe assignment with tdd_config.
-
-        Args:
-            ssf_config: the special subframe pattern config (from 1-9).
-        """
-        self.set_cell_frequency_band(ssf_cfg=ssf_config)
-
-    def set_tdd_config(self, tdd_config):
-        """Sets tdd subframe assignment with tdd_config.
-
-        Args:
-            tdd_config: the subframe assignment config (from 0-6).
-        """
-        self.set_cell_frequency_band(tdd_cfg=tdd_config)
-
-    def set_transmission_mode(self, transmission_mode):
-        """Sets transmission mode with schedular.
-
-        Args:
-            transmission_mode: the download link transmission mode.
-        """
-        if not isinstance(transmission_mode, TransmissionModes):
-            raise CmxError('Wrong type of the trasmission mode')
-        self._config_scheduler(dl_tm=transmission_mode)
-
-    def set_ul_channel(self, channel):
-        """Sets the up link channel number of cell.
-
-        Args:
-            channel: up link channel number of cell.
-        """
-        if self.ul_channel == channel:
-            logger.info('The ul_channel is at {}'.format(self.ul_channel))
-            return
-        self._cell.set_earfcn(channel)
-        logger.info('The ul_channel was set to {}'.format(self.ul_channel))
-
-    @property
-    def ul_channel(self):
-        """Gets the uplink channel of cell.
-
-        Return:
-            the uplink channel (earfcn) in int
-        """
-        return int(self._cell.get_ul_earfcn())
-
-    @property
-    def ul_frequency(self):
-        """Get the uplink frequency of the cell.
-
-        Return:
-            The uplink frequency in GHz.
-        """
-        from mrtype.frequency import Frequency
-        return self._cell.get_ul_earfcn().to_freq().in_units(
-                Frequency.Units.GHz)
-
-    def set_ul_modulation_table(self, modulation):
-        """Sets up link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('modulation must be an instance of ModulationType')
-        if modulation == ModulationType.Q16:
-            self._cell.stub.SetPuschCommonConfig(False)
-        else:
-            self._cell.stub.SetPuschCommonConfig(True)
-
-
-class NrBaseStation(BaseStation):
-    """ NR base station."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for the NR base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the NR base station.
-        """
-        from xlapi.nr_cell import NrCell
-        if not isinstance(cell, NrCell):
-            raise CmxError('The cell is not an NR cell; failed to create the '
-                           'NR base station.')
-
-        super().__init__(cmx, cell)
-
-    def _config_scheduler(self, dl_mcs=None, dl_mcs_table=None,
-                          dl_rb_alloc=None, dl_mimo_mode=None,
-                          ul_mcs=None, ul_mcs_table=None, ul_rb_alloc=None,
-                          ul_mimo_mode=None):
-
-        from rs_mrt.testenvironment.signaling.sri.rat.nr import McsTable
-
-        log_list = []
-        if dl_mcs:
-            log_list.append('dl_mcs: {}'.format(dl_mcs))
-        if ul_mcs:
-            log_list.append('ul_mcs: {}'.format(ul_mcs))
-
-        # If rb alloc is not a tuple, add 0 as start RBs for XLAPI NR scheduler
-        if dl_rb_alloc:
-            if not isinstance(dl_rb_alloc, tuple):
-                dl_rb_alloc = (0, dl_rb_alloc)
-            log_list.append('dl_rb_alloc: {}'.format(dl_rb_alloc))
-        if ul_rb_alloc:
-            if not isinstance(ul_rb_alloc, tuple):
-                ul_rb_alloc = (0, ul_rb_alloc)
-            log_list.append('ul_rb_alloc: {}'.format(ul_rb_alloc))
-        if dl_mcs_table:
-            dl_mcs_table = McsTable(dl_mcs_table)
-            log_list.append('dl_mcs_table: {}'.format(dl_mcs_table))
-        if ul_mcs_table:
-            ul_mcs_table = McsTable(ul_mcs_table)
-            log_list.append('ul_mcs_table: {}'.format(ul_mcs_table))
-        if dl_mimo_mode:
-            log_list.append('dl_mimo_mode: {}'.format(dl_mimo_mode))
-        if ul_mimo_mode:
-            log_list.append('ul_mimo_mode: {}'.format(ul_mimo_mode))
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-            time.sleep(1)
-        scheduler = self._cmx.dut.get_scheduler(self._cell)
-        logger.info('configure scheduler for {}'.format(','.join(log_list)))
-
-        scheduler.configure_ue_scheduler(
-                dl_mcs=dl_mcs, dl_mcs_table=dl_mcs_table,
-                dl_rb_alloc=dl_rb_alloc, dl_mimo_mode=dl_mimo_mode,
-                ul_mcs=ul_mcs, ul_mcs_table=ul_mcs_table,
-                ul_rb_alloc=ul_rb_alloc, ul_mimo_mode=ul_mimo_mode)
-        logger.info('Configure scheduler succeeds')
-        self._network.apply_changes()
-
-        if is_on:
-            self._cell.start()
-
-    def attach_as_secondary_cell(self, endc_timer=DEFAULT_ENDC_TIMER):
-        """Enable endc mode for NR cell.
-
-        Args:
-            endc_timer: timeout for endc state
-        """
-        logger.info('enable endc mode for nsa dual connection')
-        self._cmx.dut.signaling.nsa_dual_connect(self._cell)
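-        # Poll once per second until the DUT reports EPS_LTE_NR (EN-DC)
-        # connectivity, logging progress every 30 seconds.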
-        time_count = 0
-        while time_count < endc_timer:
-            if str(self._cmx.dut.state.radio_connectivity) == \
-                    'RadioConnectivityMode.EPS_LTE_NR':
-                logger.info('enter endc mode')
-                return
-            time.sleep(1)
-            time_count += 1
-            if time_count % 30 == 0:
-                logger.info('did not reach endc at {} s'.format(time_count))
-        raise CmxError('Cannot reach endc after {} s'.format(endc_timer))
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell.
-
-        Return:
-            the downlink channel (nr_arfcn) in int.
-        """
-        return int(self._cell.get_dl_ref_a())
-
-    def _bandwidth_to_carrier_bandwidth(self, bandwidth):
-        """Converts bandwidth in MHz to CarrierBandwidth.
-            CarrierBandwidth Enum in XLAPI:
-                MHZ_5 = 0
-                MHZ_10 = 1
-                MHZ_15 = 2
-                MHZ_20 = 3
-                MHZ_25 = 4
-                MHZ_30 = 5
-                MHZ_40 = 6
-                MHZ_50 = 7
-                MHZ_60 = 8
-                MHZ_70 = 9
-                MHZ_80 = 10
-                MHZ_90 = 11
-                MHZ_100 = 12
-                MHZ_200 = 13
-                MHZ_400 = 14
-        Args:
-            bandwidth: channel bandwidth in MHz.
-
-        Return:
-            the corresponding NR Carrier Bandwidth.
-        """
-        from mrtype.nr.frequency import CarrierBandwidth
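-        # Worked examples: 20 MHz -> 20 // 5 - 1 = 3 (MHZ_20);
-        # 40 MHz -> 2 + 40 // 10 = 6 (MHZ_40);
-        # 200 MHz -> 12 + 200 // 200 = 13 (MHZ_200).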
-        if bandwidth > 100:
-            return CarrierBandwidth(12 + bandwidth // 200)
-        elif bandwidth > 30:
-            return CarrierBandwidth(2 + bandwidth // 10)
-        else:
-            return CarrierBandwidth(bandwidth // 5 - 1)
-
-    def set_bandwidth(self, bandwidth, scs=None):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of the cell in MHz.
-            scs: subcarrier spacing (SCS) of resource grid 0
-        """
-        if not scs:
-            scs = self._cell.get_scs()
-        self._cell.set_carrier_bandwidth_and_scs(
-                self._bandwidth_to_carrier_bandwidth(bandwidth), scs)
-        logger.info('The bandwidth in MHz is {}. After setting, the value is {}'
-                    .format(bandwidth, str(self._cell.get_carrier_bandwidth())))
-
-    def set_dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        from mrtype.nr.frequency import NrArfcn
-        if self.dl_channel == channel:
-            logger.info('The dl_channel was at {}'.format(self.dl_channel))
-            return
-        self._cell.set_dl_ref_a_offset(self.band, NrArfcn(channel))
-        logger.info('The dl_channel was set to {}'.format(self.dl_channel))
-
-    def set_dl_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('modulation must be an instance of ModulationType')
-        self._config_scheduler(dl_mcs_table=modulation.value)
-
-    def set_mimo_mode(self, mimo):
-        """Sets mimo mode for NR nsa scenario.
-
-        Args:
-            mimo: the mimo mode.
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.nr import DownlinkMimoMode
-        if not isinstance(mimo, MimoModes):
-            raise CmxError("Wrong type of mimo mode")
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-        self._config_scheduler(dl_mimo_mode=DownlinkMimoMode.Enum(mimo.value))
-        if is_on:
-            self._cell.start()
-
-    def set_scheduling_mode(
-        self, mcs_dl=None, mcs_ul=None, nrb_dl=None, nrb_ul=None):
-        """Sets scheduling mode.
-
-        Args:
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        self._config_scheduler(dl_mcs=mcs_dl, ul_mcs=mcs_ul, dl_rb_alloc=nrb_dl,
-                ul_rb_alloc=nrb_ul)
-
-    def set_ssf_config(self, ssf_config):
-        """Sets ssf subframe assignment with tdd_config.
-
-        Args:
-            ssf_config: the special subframe pattern config (from 1-9).
-        """
-        raise CmxError('set_ssf_config is not implemented for NR yet')
-
-    def set_tdd_config(self, tdd_config):
-        """Sets tdd subframe assignment with tdd_config.
-
-        Args:
-            tdd_config: the subframe assignment config (from 0-6).
-        """
-        raise CmxError('set_tdd_config is not implemented for NR yet')
-
-    def set_transmission_mode(self, transmission_mode):
-        """Sets transmission mode with schedular.
-
-        Args:
-            transmission_mode: the download link transmission mode.
-        """
-        logger.info('The set transmission mode for nr is set by mimo mode')
-
-    def set_ul_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('modulation must be a ModulationType instance')
-        self._config_scheduler(ul_mcs_table=modulation.value)
-
-
-class CmxError(Exception):
-    """Class to raise exceptions related to cmx."""
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py b/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py
deleted file mode 100644
index e8a7871..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.rohdeschwarz_lib import cmx500
-from antlion.controllers.rohdeschwarz_lib.cmx500 import LteBandwidth
-from antlion.controllers.rohdeschwarz_lib.cmx500 import LteState
-from antlion.controllers import cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-
-CMX_TM_MAPPING = {
-    LteSimulation.TransmissionMode.TM1: cmx500.TransmissionModes.TM1,
-    LteSimulation.TransmissionMode.TM2: cmx500.TransmissionModes.TM2,
-    LteSimulation.TransmissionMode.TM3: cmx500.TransmissionModes.TM3,
-    LteSimulation.TransmissionMode.TM4: cmx500.TransmissionModes.TM4,
-    LteSimulation.TransmissionMode.TM7: cmx500.TransmissionModes.TM7,
-    LteSimulation.TransmissionMode.TM8: cmx500.TransmissionModes.TM8,
-    LteSimulation.TransmissionMode.TM9: cmx500.TransmissionModes.TM9,
-}
-
-CMX_SCH_MAPPING = {
-    LteSimulation.SchedulingMode.STATIC: cmx500.SchedulingMode.USERDEFINEDCH
-}
-
-CMX_MIMO_MAPPING = {
-    LteSimulation.MimoMode.MIMO_1x1: cmx500.MimoModes.MIMO1x1,
-    LteSimulation.MimoMode.MIMO_2x2: cmx500.MimoModes.MIMO2x2,
-    LteSimulation.MimoMode.MIMO_4x4: cmx500.MimoModes.MIMO4x4,
-}
-
-
-class CMX500CellularSimulator(cc.AbstractCellularSimulator):
-    """ A cellular simulator for telephony simulations based on the CMX 500
-    controller. """
-
-    def __init__(self, ip_address, port='5025'):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the CMX500
-            port: the port number for the CMX500 controller
-        """
-        super().__init__()
-        try:
-            self.cmx = cmx500.Cmx500(ip_address, port)
-        except Exception as e:
-            raise cc.CellularSimulatorError(
-                'Error while initializing CMX500.') from e
-
-        self.bts = self.cmx.bts
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.log.info('destroy the cmx500 simulator')
-        self.cmx.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        self.log.info('setup lte scenario')
-        self.cmx.switch_lte_signalling(cmx500.LteState.LTE_ON)
-
-    def setup_nr_sa_scenario(self):
-        """ Configures the equipment for an NR stand alone simulation. """
-        raise NotImplementedError()
-
-    def setup_nr_nsa_scenario(self):
-        """ Configures the equipment for an NR non stand alone simulation. """
-        self.log.info('setup nsa scenario (start lte cell and nr cell')
-        self.cmx.switch_on_nsa_signalling()
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.log.info('set timer enabled to {} and the time to {}'.format(
-            enabled, time))
-        self.cmx.rrc_state_change_time_enable = enabled
-        self.cmx.lte_rrc_state_change_timer = time
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        self.log.info('set band to {}'.format(band))
-        self.bts[bts_index].set_band(int(band))
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-
-        Returns:
-            a DuplexMode value indicating whether the band is FDD or TDD
-        """
-        if 33 <= int(band) <= 46:
-            return cmx500.DuplexMode.TDD
-        else:
-            return cmx500.DuplexMode.FDD
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        if input_power > 23:
-            self.log.warning('Open loop supports -50dBm to 23 dBm. '
-                             'Setting it to max power 23 dBm')
-            input_power = 23
-        self.log.info('set input power to {}'.format(input_power))
-        self.bts[bts_index].set_ul_power(input_power)
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.log.info('set output power to {}'.format(output_power))
-        self.bts[bts_index].set_dl_power(output_power)
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number (from 0 to 6)
-        """
-        self.log.info('set tdd config to {}'.format(tdd_config))
-        self.bts[bts_index].set_tdd_config(tdd_config)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number (from 0 to 9)
-        """
-        self.log.info('set ssf config to {}'.format(ssf_config))
-        self.bts[bts_index].set_ssf_config(ssf_config)
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth in MHz
-        """
-        self.log.info('set bandwidth of bts {} to {}'.format(
-            bts_index, bandwidth))
-        self.bts[bts_index].set_bandwidth(int(bandwidth))
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number (earfcn)
-        """
-        self.log.info(
-            'Sets the downlink channel number to {}'.format(channel_number))
-        self.bts[bts_index].set_dl_channel(channel_number)
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        self.log.info('set mimo mode to {}'.format(mimo_mode))
-        mimo_mode = CMX_MIMO_MAPPING[mimo_mode]
-        self.bts[bts_index].set_mimo_mode(mimo_mode)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tmode: the new transmission mode
-        """
-        self.log.info('set TransmissionMode to {}'.format(tmode))
-        tmode = CMX_TM_MAPPING[tmode]
-        self.bts[bts_index].set_transmission_mode(tmode)
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        if scheduling not in CMX_SCH_MAPPING:
-            raise cc.CellularSimulatorError(
-                "This scheduling mode is not supported")
-        log_list = []
-        if mcs_dl:
-            log_list.append('mcs_dl: {}'.format(mcs_dl))
-        if mcs_ul:
-            log_list.append('mcs_ul: {}'.format(mcs_ul))
-        if nrb_dl:
-            log_list.append('nrb_dl: {}'.format(nrb_dl))
-        if nrb_ul:
-            log_list.append('nrb_ul: {}'.format(nrb_ul))
-
-        self.log.info('set scheduling mode to {}'.format(','.join(log_list)))
-        self.bts[bts_index].set_scheduling_mode(mcs_dl=mcs_dl,
-                                                mcs_ul=mcs_ul,
-                                                nrb_dl=nrb_dl,
-                                                nrb_ul=nrb_ul)
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        self.log.info('Set 256 QAM DL MCS enabled: ' + str(enabled))
-        self.bts[bts_index].set_dl_modulation_table(
-            cmx500.ModulationType.Q256 if enabled else cmx500.ModulationType.
-            Q64)
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.log.info('Set 64 QAM UL MCS enabled: ' + str(enabled))
-        self.bts[bts_index].set_ul_modulation_table(
-            cmx500.ModulationType.Q64 if enabled else cmx500.ModulationType.Q16
-        )
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        self.log.info('set mac padding to {}'.format(mac_padding))
-        self.bts[bts_index].set_dl_mac_padding(mac_padding)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        if cfi == 'BESTEFFORT':
-            self.log.info('The cfi is BESTEFFORT, use default value')
-            return
-        try:
-            index = int(cfi) + 1
-        except Exception as e:
-            index = 1
-        finally:
-            self.log.info('set the cfi and the cfi index is {}'.format(index))
-            self.bts[bts_index].set_cfi(index)
-
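# A small sketch of the index arithmetic used by set_cfi above; the inputs are
# illustrative. Numeric CFI strings are shifted by one for the instrument, and
# anything non-numeric falls back to index 1.
def _cfi_to_index(cfi):
    try:
        return int(cfi) + 1
    except (TypeError, ValueError):
        return 1

assert _cfi_to_index('1') == 2
assert _cfi_to_index('not-a-number') == 1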
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        self.log.warning('The set_paging_cycle method is not implemented, '
-                         'use default value')
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.log.warning('The set_phich_resource method is not implemented, '
-                         'use default value')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        self.wait_until_communication_state()
-        self.bts[1].attach_as_secondary_cell()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        self.log.info('wait until attached')
-        self.cmx.wait_until_attached(timeout)
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        Returns:
-            True if cmx reaches the rrc state within the timeout
-        Raises:
-            CmxError if the timeout is reached
-        """
-        self.log.info('wait for rrc on state')
-        return self.cmx.wait_for_rrc_state(cmx500.RrcState.RRC_ON, timeout)
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        Returns:
-            True if cmx reaches the rrc state within the timeout
-        Raises:
-            CmxError if the timeout is reached
-        """
-        self.log.info('wait for rrc off state')
-        return self.cmx.wait_for_rrc_state(cmx500.RrcState.RRC_OFF, timeout)
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        self.log.info('Bypass simulator detach step for now')
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.log.info('Stops current simulation and disconnects cmx500')
-        self.cmx.disconnect()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        self.log.warning('The start_data_traffic is not implemented yet')
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        self.log.warning('The stop_data_traffic is not implemented yet')
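# A minimal usage sketch of the CMX500CellularSimulator API above, assuming a
# reachable instrument; the IP address, band, and bandwidth are placeholders.
from antlion.controllers.rohdeschwarz_lib import cmx500_cellular_simulator

simulator = cmx500_cellular_simulator.CMX500CellularSimulator('192.168.1.10')
simulator.setup_lte_scenario()
simulator.set_band(bts_index=0, band=7)
simulator.set_bandwidth(bts_index=0, bandwidth=20)
simulator.wait_until_attached(timeout=120)
simulator.destroy()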
diff --git a/src/antlion/controllers/rohdeschwarz_lib/contest.py b/src/antlion/controllers/rohdeschwarz_lib/contest.py
deleted file mode 100644
index 39308e9..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/contest.py
+++ /dev/null
@@ -1,422 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from zeep import client
-from antlion.libs.proc import job
-from xml.etree import ElementTree
-import requests
-import asyncio
-import time
-import threading
-import re
-import os
-import logging
-
-
-class Contest(object):
-    """ Controller interface for Rohde Schwarz CONTEST sequencer software. """
-
-    # Remote Server parameter / operation names
-    TESTPLAN_PARAM = 'Testplan'
-    TESTPLAN_VERSION_PARAM = 'TestplanVersion'
-    KEEP_ALIVE_PARAM = 'KeepContestAlive'
-    START_TESTPLAN_OPERATION = 'StartTestplan'
-
-    # Results dictionary keys
-    POS_ERROR_KEY = 'pos_error'
-    TTFF_KEY = 'ttff'
-    SENSITIVITY_KEY = 'sensitivity'
-
-    # Waiting times
-    OUTPUT_WAITING_INTERVAL = 5
-
-    # Maximum number of times to retry if the Contest system is not responding
-    MAXIMUM_OUTPUT_READ_RETRIES = 25
-
-    # Root directory for the FTP server in the remote computer
-    FTP_ROOT = 'D:\\Logs\\'
-
-    def __init__(self, logger, remote_ip, remote_port, automation_listen_ip,
-                 automation_port, dut_on_func, dut_off_func, ftp_usr, ftp_pwd):
-        """
-        Initializes the Contest software controller.
-
-        Args:
-            logger: a logger handle.
-            remote_ip: the Remote Server's IP address.
-            remote_port: port number used by the Remote Server.
-            automation_listen_ip: local IP address in which to listen for
-                Automation Server connections.
-            automation_port: port used for Contest's DUT automation requests.
-            dut_on_func: function to turn the DUT on.
-            dut_off_func: function to turn the DUT off.
-            ftp_usr: username to login to the FTP server on the remote host
-            ftp_pwd: password to authenticate ftp_user in the ftp server
-        """
-        self.log = logger
-        self.ftp_user = ftp_usr
-        self.ftp_pass = ftp_pwd
-
-        self.remote_server_ip = remote_ip
-
-        server_url = 'http://{}:{}/RemoteServer'.format(remote_ip, remote_port)
-
-        # Initialize the SOAP client to interact with Contest's Remote Server
-        try:
-            self.soap_client = client.Client(server_url + '/RemoteServer?wsdl')
-        except requests.exceptions.ConnectionError:
-            self.log.error('Could not connect to the remote endpoint. Is '
-                           'Remote Server running on the Windows computer?')
-            raise
-
-        # Assign a value to asyncio_loop in case the automation server is not
-        # started
-        self.asyncio_loop = None
-
-        # Start the automation server if an IP and port number were passed
-        if automation_listen_ip and automation_port:
-            self.start_automation_server(automation_port, automation_listen_ip,
-                                         dut_on_func, dut_off_func)
-
-    def start_automation_server(self, automation_port, automation_listen_ip,
-                                dut_on_func, dut_off_func):
-        """ Starts the Automation server in a separate process.
-
-        Args:
-            automation_listen_ip: local IP address in which to listen for
-                Automation Server connections.
-            automation_port: port used for Contest's DUT automation requests.
-            dut_on_func: function to turn the DUT on.
-            dut_off_func: function to turn the DUT off.
-        """
-
-        # Start an asyncio event loop to run the automation server
-        self.asyncio_loop = asyncio.new_event_loop()
-
-        # Start listening for automation requests on a separate thread. This
-        # will start a new thread in which a socket will listen for incoming
-        # connections and react to Contest's automation commands
-
-        def start_automation_server(asyncio_loop):
-            AutomationServer(self.log, automation_port, automation_listen_ip,
-                             dut_on_func, dut_off_func, asyncio_loop)
-
-        automation_daemon = threading.Thread(
-            target=start_automation_server, args=[self.asyncio_loop])
-        automation_daemon.start()
-
-    def execute_testplan(self, testplan):
-        """ Executes a test plan with Contest's Remote Server sequencer.
-
-        Waits until an exit code is provided in the output. Logs the output with
-        the class logger and pulls the json report from the server if the test
-        succeeds.
-
-        Args:
-            testplan: the test plan's name in the Contest system
-
-        Returns:
-            a dictionary with test results if the test finished successfully,
-            and None if it finished with an error exit code.
-        """
-
-        self.soap_client.service.DoSetParameterValue(self.TESTPLAN_PARAM,
-                                                     testplan)
-        self.soap_client.service.DoSetParameterValue(
-            self.TESTPLAN_VERSION_PARAM, 16)
-        self.soap_client.service.DoSetParameterValue(self.KEEP_ALIVE_PARAM,
-                                                     'true')
-
-        # Remote Server sometimes doesn't respond to the request immediately and
-        # frequently times out producing an exception. A shorter timeout will
-        # throw the exception earlier and allow the script to continue.
-        with self.soap_client.options(timeout=5):
-            try:
-                self.soap_client.service.DoStartOperation(
-                    self.START_TESTPLAN_OPERATION)
-            except requests.exceptions.ReadTimeout:
-                pass
-
-        self.log.info('Started testplan {} in Remote Server.'.format(testplan))
-
-        testplan_directory = None
-        read_retries = 0
-
-        while True:
-
-            time.sleep(self.OUTPUT_WAITING_INTERVAL)
-            output = self.soap_client.service.DoGetOutput()
-
-            # Output might be None while the instrument is busy.
-            if output:
-                self.log.debug(output)
-
-                # Obtain the path to the folder where reports generated by the
-                # test equipment will be stored in the remote computer
-                if not testplan_directory:
-                    prefix = re.escape('Testplan Directory: ' + self.FTP_ROOT)
-                    match = re.search('(?<={}).+(?=\\\\)'.format(prefix),
-                                      output)
-                    if match:
-                        testplan_directory = match.group(0)
-
-                # An exit code in the output indicates that the measurement is
-                # completed.
-                match = re.search('(?<=Exit code: )-?\d+', output)
-                if match:
-                    exit_code = int(match.group(0))
-                    break
-
-                # Reset the not-responding counter
-                read_retries = 0
-
-            else:
-                # If the output has been None for too many retries in a row,
-                # the testing instrument is assumed to be unresponsive.
-                read_retries += 1
-                if read_retries == self.MAXIMUM_OUTPUT_READ_RETRIES:
-                    raise RuntimeError('The Contest test sequencer is not '
-                                       'responding.')
-
-        self.log.info(
-            'Contest testplan finished with exit code {}.'.format(exit_code))
-
-        if exit_code in [0, 1]:
-            self.log.info('Testplan reports are stored in {}.'.format(
-                testplan_directory))
-
-            return self.pull_test_results(testplan_directory)
-
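# A self-contained sketch of the two regular expressions used above, applied
# to an invented output string; the sample text is illustrative, not real
# Remote Server output.
import re

ftp_root = 'D:\\Logs\\'
sample_output = ('Testplan Directory: D:\\Logs\\2023-01-01_Run\\\n'
                 'Exit code: 0\n')

prefix = re.escape('Testplan Directory: ' + ftp_root)
directory = re.search('(?<={}).+(?=\\\\)'.format(prefix), sample_output)
exit_code = re.search(r'(?<=Exit code: )-?\d+', sample_output)

assert directory.group(0) == '2023-01-01_Run'
assert int(exit_code.group(0)) == 0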
-    def pull_test_results(self, testplan_directory):
-        """ Downloads the test reports from the remote host and parses the test
-        summary to obtain the results.
-
-        Args:
-            testplan_directory: directory where to look for reports generated
-                by the test equipment in the remote computer
-
-        Returns:
-            a dictionary containing the test results
-        """
-
-        if not testplan_directory:
-            raise ValueError('Invalid testplan directory.')
-
-        # Download test reports from the remote host
-        job.run('wget -r --user={} --password={} -P {} ftp://{}/{}'.format(
-            self.ftp_user, self.ftp_pass, logging.log_path,
-            self.remote_server_ip, testplan_directory))
-
-        # Open the testplan directory
-        testplan_path = os.path.join(logging.log_path, self.remote_server_ip,
-                                     testplan_directory)
-
-        # Find the SummaryReport.xml file in the testcase folder
-        dir_list = os.listdir(testplan_path)
-        xml_path = None
-
-        for dir in dir_list:
-            if 'TestCaseName' in dir:
-                xml_path = os.path.join(testplan_path, dir,
-                                        'SummaryReport.xml')
-                break
-
-        if not xml_path:
-            raise RuntimeError('Could not find testcase directory.')
-
-        # Return the obtained report as a dictionary
-        xml_tree = ElementTree.ElementTree()
-        xml_tree.parse(source=xml_path)
-
-        results_dictionary = {}
-
-        col_iterator = xml_tree.iter('column')
-        for col in col_iterator:
-            # Look in the text of the first child for the required metrics
-            if col.text == '2D position error [m]':
-                results_dictionary[self.POS_ERROR_KEY] = {
-                    'min': float(next(col_iterator).text),
-                    'med': float(next(col_iterator).text),
-                    'avg': float(next(col_iterator).text),
-                    'max': float(next(col_iterator).text)
-                }
-            elif col.text == 'Time to first fix [s]':
-                results_dictionary[self.TTFF_KEY] = {
-                    'min': float(next(col_iterator).text),
-                    'med': float(next(col_iterator).text),
-                    'avg': float(next(col_iterator).text),
-                    'max': float(next(col_iterator).text)
-                }
-
-        message_iterator = xml_tree.iter('message')
-        for message in message_iterator:
-            # Look for the line showing sensitivity
-            if message.text:
-                # The typo in 'successfull' is intended as it is present in the
-                # test logs generated by the Contest system.
-                match = re.search('(?<=Margin search completed, the lowest '
-                                  'successfull output power is )-?\d+.?\d+'
-                                  '(?= dBm)', message.text)
-                if match:
-                    results_dictionary[self.SENSITIVITY_KEY] = float(
-                        match.group(0))
-                    break
-
-        return results_dictionary
-
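# A compact sketch of the column-iterator pattern used above, run against a
# made-up XML fragment; the real SummaryReport.xml layout may differ.
from xml.etree import ElementTree

sample = ('<report><table>'
          '<column>2D position error [m]</column>'
          '<column>1.0</column><column>2.0</column>'
          '<column>2.5</column><column>4.0</column>'
          '</table></report>')

col_iterator = ElementTree.fromstring(sample).iter('column')
results = {}
for col in col_iterator:
    if col.text == '2D position error [m]':
        # The next four columns hold min/med/avg/max, in that order.
        results['pos_error'] = {
            'min': float(next(col_iterator).text),
            'med': float(next(col_iterator).text),
            'avg': float(next(col_iterator).text),
            'max': float(next(col_iterator).text),
        }

assert results['pos_error'] == {'min': 1.0, 'med': 2.0, 'avg': 2.5, 'max': 4.0}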
-    def destroy(self):
-        """ Closes all open connections and kills running threads. """
-        if self.asyncio_loop:
-            # Stopping the asyncio loop will let the Automation Server exit
-            self.asyncio_loop.call_soon_threadsafe(self.asyncio_loop.stop)
-
-
-class AutomationServer:
-    """ Server object that handles DUT automation requests from Contest's Remote
-    Server.
-    """
-
-    def __init__(self, logger, port, listen_ip, dut_on_func, dut_off_func,
-                 asyncio_loop):
-        """ Initializes the Automation Server.
-
-        Opens a listening socket using a asyncio and waits for incoming
-        connections.
-
-        Args:
-            logger: a logger handle
-            port: port used for Contest's DUT automation requests
-            listen_ip: local IP in which to listen for connections
-            dut_on_func: function to turn the DUT on
-            dut_off_func: function to turn the DUT off
-            asyncio_loop: asyncio event loop to listen and process incoming
-                data asynchronously
-        """
-
-        self.log = logger
-
-        # Define a protocol factory that will provide new Protocol
-        # objects to the server created by asyncio. These Protocol
-        # objects will handle incoming commands.
-        def aut_protocol_factory():
-            return self.AutomationProtocol(logger, dut_on_func, dut_off_func)
-
-        # Each client connection will create a new protocol instance
-        coro = asyncio_loop.create_server(aut_protocol_factory, listen_ip,
-                                          port)
-
-        self.server = asyncio_loop.run_until_complete(coro)
-
-        # Serve requests until the event loop is stopped
-        self.log.info('Automation Server listening on {}'.format(
-            self.server.sockets[0].getsockname()))
-        asyncio_loop.run_forever()
-
-    class AutomationProtocol(asyncio.Protocol):
-        """ Defines the protocol for communication with Contest's Automation
-        client. """
-
-        AUTOMATION_DUT_ON = 'DUT_SWITCH_ON'
-        AUTOMATION_DUT_OFF = 'DUT_SWITCH_OFF'
-        AUTOMATION_OK = 'OK'
-
-        NOTIFICATION_TESTPLAN_START = 'AtTestplanStart'
-        NOTIFICATION_TESTCASE_START = 'AtTestcaseStart'
-        NOTIFICATION_TESTCASE_END = 'AfterTestcase'
-        NOTIFICATION_TESTPLAN_END = 'AfterTestplan'
-
-        def __init__(self, logger, dut_on_func, dut_off_func):
-            """ Keeps the function handles to be used upon incoming requests.
-
-            Args:
-                logger: a logger handle
-                dut_on_func: function to turn the DUT on
-                dut_off_func: function to turn the DUT off
-            """
-
-            self.log = logger
-            self.dut_on_func = dut_on_func
-            self.dut_off_func = dut_off_func
-
-        def connection_made(self, transport):
-            """ Called when a connection has been established.
-
-            Args:
-                transport: represents the socket connection.
-            """
-
-            # Keep a reference to the transport as it will allow to write
-            # data to the socket later.
-            self.transport = transport
-
-            peername = transport.get_extra_info('peername')
-            self.log.info('Connection from {}'.format(peername))
-
-        def data_received(self, data):
-            """ Called when some data is received.
-
-            Args:
-                 data: non-empty bytes object containing the incoming data
-             """
-            command = data.decode()
-
-            # Remove the line break and newline characters at the end
-            command = re.sub('\r?\n$', '', command)
-
-            self.log.info("Command received from Contest's Automation "
-                          "client: {}".format(command))
-
-            if command == self.AUTOMATION_DUT_ON:
-                self.log.info("Contest's Automation client requested to set "
-                              "DUT to on state.")
-                self.send_ok()
-                self.dut_on_func()
-                return
-            elif command == self.AUTOMATION_DUT_OFF:
-                self.log.info("Contest's Automation client requested to set "
-                              "DUT to off state.")
-                self.dut_off_func()
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTPLAN_START):
-                self.log.info('Test plan is starting.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTCASE_START):
-                self.log.info('Test case is starting.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTCASE_END):
-                self.log.info('Test case finished.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTPLAN_END):
-                self.log.info('Test plan finished.')
-                self.send_ok()
-            else:
-                self.log.error('Unhandled automation command: ' + command)
-                raise ValueError()
-
-        def send_ok(self):
-            """ Sends an OK message to the Automation server. """
-            self.log.info("Sending OK response to Contest's Automation client")
-            self.transport.write(
-                bytearray(
-                    self.AUTOMATION_OK + '\n',
-                    encoding='utf-8',
-                    ))
-
-        def eof_received(self):
-            """ Called when the other end signals it won’t send any more
-            data.
-            """
-            self.log.info('Received EOF from Contest Automation client.')
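# A minimal sketch of one client exchange with the AutomationProtocol above,
# using a plain socket; the host, port, and timeout values are placeholders
# for the listen_ip/port given to AutomationServer.
import socket

with socket.create_connection(('127.0.0.1', 5555), timeout=10) as sock:
    sock.sendall(b'DUT_SWITCH_ON\n')
    response = sock.recv(1024).decode().strip()
    assert response == 'OK'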
diff --git a/src/antlion/controllers/rohdeschwarz_lib/smbv100.py b/src/antlion/controllers/rohdeschwarz_lib/smbv100.py
deleted file mode 100644
index 10ec98c..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/smbv100.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Rohde & Schwarz SMBV100 Vector Signal Generator."""
-
-import numbers
-from antlion.controllers import abstract_inst
-
-
-class SMBV100Error(abstract_inst.SocketInstrumentError):
-    """SMBV100 Instrument Error Class."""
-
-
-class SMBV100(abstract_inst.SocketInstrument):
-    """SMBV100 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for SMBV100.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super(SMBV100, self).__init__(ip_addr, ip_port)
-
-        self.idn = ''
-
-    def connect(self):
-        """Init and Connect to SMBV100."""
-        self._connect_socket()
-
-        self.get_idn()
-
-        infmsg = 'Connected to SMBV100, with ID: {}'.format(self.idn)
-        self._logger.debug(infmsg)
-
-    def close(self):
-        """Close SMBV100."""
-        self._close_socket()
-
-        self._logger.debug('Closed connection to SMBV100')
-
-    def get_idn(self):
-        """Get the Idenification of SMBV100.
-
-        Returns:
-            SMBV100 Identifier
-        """
-        self.idn = self._query('*IDN?')
-
-        return self.idn
-
-    def preset(self):
-        """Preset SMBV100 to default status."""
-        self._send('*RST')
-
-        self._logger.debug('Preset SMBV100')
-
-    def set_rfout_state(self, state):
-        """set SMBV100 RF output state.
-
-        Args:
-            state: RF output state.
-                Type, str. Option, ON/OFF.
-
-        Raises:
-            SMBV100Error: raised when state is not ON/OFF.
-        """
-
-        if state not in ['ON', 'OFF']:
-            raise SMBV100Error(error='"state" input must be "ON" or "OFF"',
-                               command='set_rfout')
-
-        self._send(':OUTP ' + state)
-
-        infmsg = 'set SMBV100 RF output to "{}"'.format(state)
-        self._logger.debug(infmsg)
-
-    def set_rfout_freq(self, freq):
-        """set SMBV100 RF output frequency.
-
-        Args:
-            freq: RF output frequency.
-                Type, num.
-
-        Raises:
-            SMBV100Error: raised when 'freq' is not a numerical value.
-        """
-
-        if not isinstance(freq, numbers.Number):
-            raise SMBV100Error(error='"freq" input must be numerical value',
-                               command='set_rfoutfreq')
-
-        self._send(':SOUR:FREQ:CW ' + str(freq))
-
-        infmsg = 'set SMBV100 RF output frequency to {} Hz'.format(freq)
-        self._logger.debug(infmsg)
-
-    def get_rfout_freq(self):
-        """get SMBV100 RF output frequency.
-
-        Return:
-            freq: RF output frequency.
-                Type, num.
-        """
-        resp = self._query(':SOUR:FREQ:CW?')
-
-        freq = float(resp.split(';')[0])
-
-        infmsg = 'get SMBV100 RF output frequency as {} Hz'.format(freq)
-        self._logger.debug(infmsg)
-
-        return freq
-
-    def set_rfout_level(self, level):
-        """set SMBV100 RF output level.
-
-        Args:
-            level: RF Level.
-                Type, num.
-
-        Raises:
-            SMBV100Error: raised when 'level' is not a numerical value.
-        """
-
-        if not isinstance(level, numbers.Number):
-            raise SMBV100Error(error='"level" input must be numerical value',
-                               command='set_rflevel')
-
-        self._send(':SOUR:POW:LEV:IMM:AMPL ' + str(level))
-
-        infmsg = 'set SMBV100 RF level to {} dBm'.format(level)
-        self._logger.debug(infmsg)
-
-    def get_rfout_level(self):
-        """get SMBV100 RF out level.
-
-        Return:
-            level: RF Level.
-                Type, num.
-        """
-        resp = self._query(':SOUR:POW:LEV:IMM:AMPL?')
-
-        level = float(resp.split(';')[0])
-
-        infmsg = 'get SMBV100 RF level as {} dBm'.format(level)
-        self._logger.debug(infmsg)
-
-        return level
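# A short usage sketch of the SMBV100 driver above; the address, port,
# frequency, and level values are placeholders rather than a known setup.
from antlion.controllers.rohdeschwarz_lib import smbv100

siggen = smbv100.SMBV100('192.168.1.20', '5025')
siggen.connect()
siggen.preset()
siggen.set_rfout_freq(1575420000)  # Hz
siggen.set_rfout_level(-110)       # dBm
siggen.set_rfout_state('ON')
siggen.close()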
diff --git a/src/antlion/controllers/sl4a_lib/error_reporter.py b/src/antlion/controllers/sl4a_lib/error_reporter.py
index c36593b..e560567 100644
--- a/src/antlion/controllers/sl4a_lib/error_reporter.py
+++ b/src/antlion/controllers/sl4a_lib/error_reporter.py
@@ -31,7 +31,7 @@
 
     def process(self, msg, kwargs):
         """Transforms a log message to be in a given format."""
-        return '[Error Report|%s] %s' % (self.label, msg), kwargs
+        return "[Error Report|%s] %s" % (self.label, msg), kwargs
 
 
 class ErrorReporter(object):
@@ -74,13 +74,15 @@
             if not ticket:
                 return False
 
-            report = ErrorLogger('%s|%s' % (self.name, ticket))
-            report.info('Creating error report.')
+            report = ErrorLogger("%s|%s" % (self.name, ticket))
+            report.info("Creating error report.")
 
-            (self.report_on_adb(sl4a_manager.adb, report)
-             and self.report_device_processes(sl4a_manager.adb, report) and
-             self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report)
-             and self.report_sl4a_session(sl4a_manager, sl4a_session, report))
+            (
+                self.report_on_adb(sl4a_manager.adb, report)
+                and self.report_device_processes(sl4a_manager.adb, report)
+                and self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report)
+                and self.report_sl4a_session(sl4a_manager, sl4a_session, report)
+            )
 
             return True
         finally:
@@ -90,27 +92,31 @@
         """Creates an error report for ADB. Returns false if ADB has failed."""
         adb_uptime = utils.get_command_uptime('"adb .* server"')
         if adb_uptime:
-            report.info('The adb daemon has an uptime of %s '
-                        '([[dd-]hh:]mm:ss).' % adb_uptime)
+            report.info(
+                "The adb daemon has an uptime of %s " "([[dd-]hh:]mm:ss)." % adb_uptime
+            )
         else:
-            report.warning('The adb daemon (on the host machine) is not '
-                           'running. All forwarded ports have been removed.')
+            report.warning(
+                "The adb daemon (on the host machine) is not "
+                "running. All forwarded ports have been removed."
+            )
             return False
 
         devices_output = adb.devices()
         if adb.serial not in devices_output:
             report.warning(
-                'This device cannot be found by ADB. The device may have shut '
-                'down or disconnected.')
+                "This device cannot be found by ADB. The device may have shut "
+                "down or disconnected."
+            )
             return False
-        elif re.findall(r'%s\s+offline' % adb.serial, devices_output):
+        elif re.findall(r"%s\s+offline" % adb.serial, devices_output):
             report.warning(
-                'The device is marked as offline in ADB. We are no longer able '
-                'to access the device.')
+                "The device is marked as offline in ADB. We are no longer able "
+                "to access the device."
+            )
             return False
         else:
-            report.info(
-                'The device is online and accessible through ADB calls.')
+            report.info("The device is online and accessible through ADB calls.")
         return True
 
     def report_device_processes(self, adb, report):
@@ -119,89 +125,102 @@
         Returns:
             False iff user-apks cannot be communicated with over tcp.
         """
-        zygote_uptime = utils.get_device_process_uptime(adb, 'zygote')
+        zygote_uptime = utils.get_device_process_uptime(adb, "zygote")
         if zygote_uptime:
             report.info(
-                'Zygote has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                zygote_uptime)
+                "Zygote has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is low, the phone may have recently crashed." % zygote_uptime
+            )
         else:
             report.warning(
-                'Zygote has been killed. It is likely the Android Runtime has '
-                'crashed. Check the bugreport/logcat for more information.')
+                "Zygote has been killed. It is likely the Android Runtime has "
+                "crashed. Check the bugreport/logcat for more information."
+            )
             return False
 
-        netd_uptime = utils.get_device_process_uptime(adb, 'netd')
+        netd_uptime = utils.get_device_process_uptime(adb, "netd")
         if netd_uptime:
             report.info(
-                'Netd has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                zygote_uptime)
+                "Netd has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is low, the phone may have recently crashed." % zygote_uptime
+            )
         else:
             report.warning(
-                'Netd has been killed. The Android Runtime may have crashed. '
-                'Check the bugreport/logcat for more information.')
+                "Netd has been killed. The Android Runtime may have crashed. "
+                "Check the bugreport/logcat for more information."
+            )
             return False
 
-        adbd_uptime = utils.get_device_process_uptime(adb, 'adbd')
+        adbd_uptime = utils.get_device_process_uptime(adb, "adbd")
         if adbd_uptime:
             report.info(
-                'Adbd has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                adbd_uptime)
+                "Adbd has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is low, the phone may have recently crashed." % adbd_uptime
+            )
         else:
-            report.warning('Adbd is not running.')
+            report.warning("Adbd is not running.")
             return False
         return True
 
     def report_sl4a_state(self, rpc_connection, adb, report):
         """Creates an error report for the state of SL4A."""
-        report.info(
-            'Diagnosing Failure over connection %s.' % rpc_connection.ports)
+        report.info("Diagnosing Failure over connection %s." % rpc_connection.ports)
 
         ports = rpc_connection.ports
-        forwarded_ports_output = adb.forward('--list')
+        forwarded_ports_output = adb.forward("--list")
 
-        expected_output = '%s tcp:%s tcp:%s' % (
-            adb.serial, ports.forwarded_port, ports.server_port)
+        expected_output = "%s tcp:%s tcp:%s" % (
+            adb.serial,
+            ports.forwarded_port,
+            ports.server_port,
+        )
         if expected_output not in forwarded_ports_output:
             formatted_output = re.sub(
-                '^', '    ', forwarded_ports_output, flags=re.MULTILINE)
+                "^", "    ", forwarded_ports_output, flags=re.MULTILINE
+            )
             report.warning(
-                'The forwarded port for the failed RpcConnection is missing.\n'
-                'Expected:\n    %s\nBut found:\n%s' % (expected_output,
-                                                       formatted_output))
+                "The forwarded port for the failed RpcConnection is missing.\n"
+                "Expected:\n    %s\nBut found:\n%s"
+                % (expected_output, formatted_output)
+            )
             return False
         else:
-            report.info('The connection port has been properly forwarded to '
-                        'the device.')
+            report.info(
+                "The connection port has been properly forwarded to " "the device."
+            )
 
         sl4a_uptime = utils.get_device_process_uptime(
-            adb, 'com.googlecode.android_scripting')
+            adb, "com.googlecode.android_scripting"
+        )
         if sl4a_uptime:
             report.info(
-                'SL4A has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is lower than the test case, it must have been '
-                'restarted during the test.' % sl4a_uptime)
+                "SL4A has been running for %s ([[dd-]hh:]mm:ss). If this "
+                "value is lower than the test case, it must have been "
+                "restarted during the test." % sl4a_uptime
+            )
         else:
             report.warning(
-                'The SL4A scripting service is not running. SL4A may have '
-                'crashed, or have been terminated by the Android Runtime.')
+                "The SL4A scripting service is not running. SL4A may have "
+                "crashed, or have been terminated by the Android Runtime."
+            )
             return False
         return True
 
     def report_sl4a_session(self, sl4a_manager, session, report):
         """Reports the state of an SL4A session."""
         if session.server_port not in sl4a_manager.sl4a_ports_in_use:
-            report.warning('SL4A server port %s not found in set of open '
-                           'ports %s' % (session.server_port,
-                                         sl4a_manager.sl4a_ports_in_use))
+            report.warning(
+                "SL4A server port %s not found in set of open "
+                "ports %s" % (session.server_port, sl4a_manager.sl4a_ports_in_use)
+            )
             return False
 
         if session not in sl4a_manager.sessions.values():
-            report.warning('SL4A session %s over port %s is not managed by '
-                           'the SL4A Manager. This session is already dead.' %
-                           (session.uid, session.server_port))
+            report.warning(
+                "SL4A session %s over port %s is not managed by "
+                "the SL4A Manager. This session is already dead."
+                % (session.uid, session.server_port)
+            )
             return False
         return True
 
@@ -209,11 +228,11 @@
         self._accept_requests = False
         while self._current_request_count > 0:
             # Wait for other threads to finish.
-            time.sleep(.1)
+            time.sleep(0.1)
 
     def _get_report_ticket(self):
         """Returns the next ticket, or none if all tickets have been used."""
-        logging.debug('Getting ticket for SL4A error report.')
+        logging.debug("Getting ticket for SL4A error report.")
         with self._ticket_lock:
             self._ticket_number += 1
             ticket_number = self._ticket_number
diff --git a/src/antlion/controllers/sl4a_lib/event_dispatcher.py b/src/antlion/controllers/sl4a_lib/event_dispatcher.py
index 4cdce8f..503923c 100644
--- a/src/antlion/controllers/sl4a_lib/event_dispatcher.py
+++ b/src/antlion/controllers/sl4a_lib/event_dispatcher.py
@@ -65,8 +65,11 @@
 
         def _log_formatter(message):
             """Defines the formatting used in the logger."""
-            return '[E Dispatcher|%s|%s] %s' % (self._serial,
-                                                self._rpc_client.uid, message)
+            return "[E Dispatcher|%s|%s] %s" % (
+                self._serial,
+                self._rpc_client.uid,
+                message,
+            )
 
         self.log = logger.create_logger(_log_formatter)
 
@@ -84,32 +87,33 @@
                 event_obj = self._rpc_client.eventWait(60000, timeout=120)
             except rpc_client.Sl4aConnectionError as e:
                 if self._rpc_client.is_alive:
-                    self.log.warning('Closing due to closed session.')
+                    self.log.warning("Closing due to closed session.")
                     break
                 else:
-                    self.log.warning('Closing due to error: %s.' % e)
+                    self.log.warning("Closing due to error: %s." % e)
                     self.close()
                     raise e
             if not event_obj:
                 continue
-            elif 'name' not in event_obj:
-                self.log.error('Received Malformed event {}'.format(event_obj))
+            elif "name" not in event_obj:
+                self.log.error("Received Malformed event {}".format(event_obj))
                 continue
             else:
-                event_name = event_obj['name']
+                event_name = event_obj["name"]
             # if handler registered, process event
-            if event_name == 'EventDispatcherShutdown':
-                self.log.debug('Received shutdown signal.')
+            if event_name == "EventDispatcherShutdown":
+                self.log.debug("Received shutdown signal.")
                 # closeSl4aSession has been called, which closes the event
                 # dispatcher. Stop execution on this polling thread.
                 return
             if event_name in self._handlers:
                 self.log.debug(
-                    'Using handler %s for event: %r' %
-                    (self._handlers[event_name].__name__, event_obj))
+                    "Using handler %s for event: %r"
+                    % (self._handlers[event_name].__name__, event_obj)
+                )
                 self.handle_subscribed_event(event_obj, event_name)
             else:
-                self.log.debug('Queuing event: %r' % event_obj)
+                self.log.debug("Queuing event: %r" % event_obj)
                 self._lock.acquire()
                 if event_name in self._event_dict:  # otherwise, cache event
                     self._event_dict[event_name].put(event_obj)
@@ -136,13 +140,15 @@
                 handler for one type of event.
         """
         if self._started:
-            raise IllegalStateError('Cannot register service after polling is '
-                                    'started.')
+            raise IllegalStateError(
+                "Cannot register service after polling is " "started."
+            )
         self._lock.acquire()
         try:
             if event_name in self._handlers:
                 raise DuplicateError(
-                    'A handler for {} already exists'.format(event_name))
+                    "A handler for {} already exists".format(event_name)
+                )
             self._handlers[event_name] = (handler, args)
         finally:
             self._lock.release()
@@ -195,14 +201,14 @@
                 starts polling.
         """
         if not self._started:
-            raise IllegalStateError(
-                'Dispatcher needs to be started before popping.')
+            raise IllegalStateError("Dispatcher needs to be started before popping.")
 
         e_queue = self.get_event_q(event_name)
 
         if not e_queue:
             raise IllegalStateError(
-                'Failed to get an event queue for {}'.format(event_name))
+                "Failed to get an event queue for {}".format(event_name)
+            )
 
         try:
             # Block for timeout
@@ -215,17 +221,13 @@
                 # Block forever on event wait
                 return e_queue.get(True)
         except queue.Empty:
-            msg = 'Timeout after {}s waiting for event: {}'.format(
-                timeout, event_name)
+            msg = "Timeout after {}s waiting for event: {}".format(timeout, event_name)
             self.log.info(msg)
             raise queue.Empty(msg)
 
-    def wait_for_event(self,
-                       event_name,
-                       predicate,
-                       timeout=DEFAULT_TIMEOUT,
-                       *args,
-                       **kwargs):
+    def wait_for_event(
+        self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs
+    ):
         """Wait for an event that satisfies a predicate to appear.
 
         Continuously pop events of a particular name and check against the
@@ -252,15 +254,15 @@
         """
         deadline = time.time() + timeout
         ignored_events = []
-        consume_events = kwargs.pop('consume_ignored_events', True)
+        consume_events = kwargs.pop("consume_ignored_events", True)
         while True:
             event = None
             try:
                 event = self.pop_event(event_name, 1)
                 if consume_events:
-                    self.log.debug('Consuming event: %r' % event)
+                    self.log.debug("Consuming event: %r" % event)
                 else:
-                    self.log.debug('Peeking at event: %r' % event)
+                    self.log.debug("Peeking at event: %r" % event)
                     ignored_events.append(event)
             except queue.Empty:
                 pass
@@ -268,15 +270,17 @@
             if event and predicate(event, *args, **kwargs):
                 for ignored_event in ignored_events:
                     self.get_event_q(event_name).put(ignored_event)
-                self.log.debug('Matched event: %r with %s' %
-                               (event, predicate.__name__))
+                self.log.debug(
+                    "Matched event: %r with %s" % (event, predicate.__name__)
+                )
                 return event
 
             if time.time() > deadline:
                 for ignored_event in ignored_events:
                     self.get_event_q(event_name).put(ignored_event)
-                msg = 'Timeout after {}s waiting for event: {}'.format(
-                    timeout, event_name)
+                msg = "Timeout after {}s waiting for event: {}".format(
+                    timeout, event_name
+                )
                 self.log.info(msg)
                 raise queue.Empty(msg)
 
@@ -305,8 +309,7 @@
             queue.Empty: Raised if no event was found before time out.
         """
         if not self._started:
-            raise IllegalStateError(
-                "Dispatcher needs to be started before popping.")
+            raise IllegalStateError("Dispatcher needs to be started before popping.")
         deadline = time.time() + timeout
         while True:
             # TODO: fix the sleep loop
@@ -315,12 +318,13 @@
                 break
             time.sleep(freq)
         if len(results) == 0:
-            msg = 'Timeout after {}s waiting for event: {}'.format(
-                timeout, regex_pattern)
+            msg = "Timeout after {}s waiting for event: {}".format(
+                timeout, regex_pattern
+            )
             self.log.error(msg)
             raise queue.Empty(msg)
 
-        return sorted(results, key=lambda event: event['time'])
+        return sorted(results, key=lambda event: event["time"])
 
     def _match_and_pop(self, regex_pattern):
         """Pop one event from each of the event queues whose names
@@ -347,8 +351,7 @@
         Returns: A queue storing all the events of the specified name.
         """
         self._lock.acquire()
-        if (event_name not in self._event_dict
-                or self._event_dict[event_name] is None):
+        if event_name not in self._event_dict or self._event_dict[event_name] is None:
             self._event_dict[event_name] = queue.Queue()
         self._lock.release()
 
@@ -368,8 +371,9 @@
         handler, args = self._handlers[event_name]
         self._executor.submit(handler, event_obj, *args)
 
-    def _handle(self, event_handler, event_name, user_args, event_timeout,
-                cond, cond_timeout):
+    def _handle(
+        self, event_handler, event_name, user_args, event_timeout, cond, cond_timeout
+    ):
         """Pop an event of specified type and calls its handler on it. If
         condition is not None, block until condition is met or timeout.
         """
@@ -378,13 +382,15 @@
         event = self.pop_event(event_name, event_timeout)
         return event_handler(event, *user_args)
 
-    def handle_event(self,
-                     event_handler,
-                     event_name,
-                     user_args,
-                     event_timeout=None,
-                     cond=None,
-                     cond_timeout=None):
+    def handle_event(
+        self,
+        event_handler,
+        event_name,
+        user_args,
+        event_timeout=None,
+        cond=None,
+        cond_timeout=None,
+    ):
         """Handle events that don't have registered handlers
 
         In a new thread, poll one event of specified type from its queue and
@@ -408,9 +414,15 @@
                 If blocking call worker.result() is triggered, the handler
                 needs to return something to unblock.
         """
-        worker = self._executor.submit(self._handle, event_handler, event_name,
-                                       user_args, event_timeout, cond,
-                                       cond_timeout)
+        worker = self._executor.submit(
+            self._handle,
+            event_handler,
+            event_name,
+            user_args,
+            event_timeout,
+            cond,
+            cond_timeout,
+        )
         return worker
 
     def pop_all(self, event_name):
@@ -430,8 +442,9 @@
                 starts polling.
         """
         if not self._started:
-            raise IllegalStateError(("Dispatcher needs to be started before "
-                                     "popping."))
+            raise IllegalStateError(
+                ("Dispatcher needs to be started before " "popping.")
+            )
         results = []
         try:
             self._lock.acquire()
@@ -469,7 +482,7 @@
 
     def is_event_match_for_list(self, event, field, value_list):
         try:
-            value_in_event = event['data'][field]
+            value_in_event = event["data"][field]
         except KeyError:
             return False
         for value in value_list:
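Illustrative sketch (not part of the patch): how the reformatted wait_for_event() API above is typically driven from a test. Here dispatcher is assumed to be an EventDispatcher instance; the event name "SomeEvent" and the "status" payload field are hypothetical. SL4A events are dicts carrying their payload under the "data" key, as the predicate helpers above show.

    def _is_success(event):
        # Payload fields live under "data"; a missing key simply fails the match.
        return event["data"].get("status") == "success"

    # Blocks until a matching event arrives, or raises queue.Empty on timeout.
    event = dispatcher.wait_for_event("SomeEvent", _is_success, timeout=30)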
diff --git a/src/antlion/controllers/sl4a_lib/rpc_client.py b/src/antlion/controllers/sl4a_lib/rpc_client.py
index fd45c04..cc2cee2 100644
--- a/src/antlion/controllers/sl4a_lib/rpc_client.py
+++ b/src/antlion/controllers/sl4a_lib/rpc_client.py
@@ -28,6 +28,7 @@
 # The Session UID when a UID has not been received yet.
 UNKNOWN_UID = -1
 
+
 class Sl4aException(error.ActsError):
     """The base class for all SL4A exceptions."""
 
@@ -48,7 +49,7 @@
         data: The extra data, if any, returned by SL4A.
     """
 
-    def __init__(self, message, code=-1, data=None, rpc_name=''):
+    def __init__(self, message, code=-1, data=None, rpc_name=""):
         super().__init__()
         self.message = message
         self.code = code
@@ -60,11 +61,14 @@
 
     def __str__(self):
         if self.data:
-            return 'Error in RPC %s %s:%s:%s' % (self.rpc_name, self.code,
-                                                 self.message, self.data)
+            return "Error in RPC %s %s:%s:%s" % (
+                self.rpc_name,
+                self.code,
+                self.message,
+                self.data,
+            )
         else:
-            return 'Error in RPC %s %s:%s' % (self.rpc_name, self.code,
-                                              self.message)
+            return "Error in RPC %s %s:%s" % (self.rpc_name, self.code, self.message)
 
 
 class Sl4aConnectionError(Sl4aException):
@@ -73,9 +77,10 @@
 
 class Sl4aProtocolError(Sl4aException):
     """Raised when there an error in exchanging data with server on device."""
-    NO_RESPONSE_FROM_HANDSHAKE = 'No response from handshake.'
-    NO_RESPONSE_FROM_SERVER = 'No response from server.'
-    MISMATCHED_API_ID = 'Mismatched API id.'
+
+    NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
+    NO_RESPONSE_FROM_SERVER = "No response from server."
+    MISMATCHED_API_ID = "Mismatched API id."
 
 
 class Sl4aNotInstalledError(Sl4aException):
@@ -98,6 +103,7 @@
             modify the thread pool size being used for self.future RPC calls.
         _log: The logger for this RpcClient.
     """
+
     """The default value for the maximum amount of connections for a client."""
     DEFAULT_MAX_CONNECTION = 15
 
@@ -112,7 +118,8 @@
         def __init__(self, rpc_client):
             self._rpc_client = rpc_client
             self._executor = futures.ThreadPoolExecutor(
-                max_workers=max(rpc_client.max_connections - 2, 1))
+                max_workers=max(rpc_client.max_connections - 2, 1)
+            )
 
         def rpc(self, name, *args, **kwargs):
             future = self._executor.submit(name, *args, **kwargs)
@@ -123,17 +130,20 @@
 
             def rpc_call(*args, **kwargs):
                 future = self._executor.submit(
-                    self._rpc_client.__getattr__(name), *args, **kwargs)
+                    self._rpc_client.__getattr__(name), *args, **kwargs
+                )
                 return future
 
             return rpc_call
 
-    def __init__(self,
-                 uid,
-                 serial,
-                 on_error_callback,
-                 _create_connection_func,
-                 max_connections=None):
+    def __init__(
+        self,
+        uid,
+        serial,
+        on_error_callback,
+        _create_connection_func,
+        max_connections=None,
+    ):
         """Creates a new RpcClient object.
 
         Args:
@@ -155,7 +165,7 @@
 
         def _log_formatter(message):
             """Formats the message to be logged."""
-            return '[RPC Service|%s|%s] %s' % (self._serial, self.uid, message)
+            return "[RPC Service|%s|%s] %s" % (self._serial, self.uid, message)
 
         self._log = logger.create_logger(_log_formatter)
 
@@ -172,13 +182,13 @@
         """Terminates all connections to the SL4A server."""
         if len(self._working_connections) > 0:
             self._log.warning(
-                '%s connections are still active, and waiting on '
-                'responses.Closing these connections now.' % len(
-                    self._working_connections))
+                "%s connections are still active, and waiting on "
+                "responses.Closing these connections now."
+                % len(self._working_connections)
+            )
         connections = self._free_connections + self._working_connections
         for connection in connections:
-            self._log.debug(
-                'Closing connection over ports %s' % connection.ports)
+            self._log.debug("Closing connection over ports %s" % connection.ports)
             connection.close()
         self._free_connections = []
         self._working_connections = []
@@ -201,17 +211,17 @@
                     self._working_connections.append(client)
                     return client
 
-            client_count = (len(self._free_connections) +
-                            len(self._working_connections))
+            client_count = len(self._free_connections) + len(self._working_connections)
             if client_count < self.max_connections:
                 with self._lock:
-                    client_count = (len(self._free_connections) +
-                                    len(self._working_connections))
+                    client_count = len(self._free_connections) + len(
+                        self._working_connections
+                    )
                     if client_count < self.max_connections:
                         client = self._create_connection_func(self.uid)
                         self._working_connections.append(client)
                         return client
-            time.sleep(.01)
+            time.sleep(0.01)
 
     def _release_working_connection(self, connection):
         """Marks a working client as free.
@@ -250,9 +260,9 @@
         timed_out = False
         if timeout:
             connection.set_timeout(timeout)
-        data = {'id': ticket, 'method': method, 'params': args}
+        data = {"id": ticket, "method": method, "params": args}
         request = json.dumps(data)
-        response = ''
+        response = ""
         try:
             for i in range(1, retries + 1):
                 connection.send_request(request)
@@ -261,36 +271,43 @@
                 if not response:
                     if i < retries:
                         self._log.warning(
-                            'No response for RPC method %s on iteration %s',
-                            method, i)
+                            "No response for RPC method %s on iteration %s", method, i
+                        )
                         continue
                     else:
                         self._log.exception(
-                            'No response for RPC method %s on iteration %s',
-                            method, i)
+                            "No response for RPC method %s on iteration %s", method, i
+                        )
                         self.on_error(connection)
                         raise Sl4aProtocolError(
-                            Sl4aProtocolError.NO_RESPONSE_FROM_SERVER)
+                            Sl4aProtocolError.NO_RESPONSE_FROM_SERVER
+                        )
                 else:
                     break
         except BrokenPipeError as e:
             if self.is_alive:
-                self._log.exception('The device disconnected during RPC call '
-                                    '%s. Please check the logcat for a crash '
-                                    'or disconnect.', method)
+                self._log.exception(
+                    "The device disconnected during RPC call "
+                    "%s. Please check the logcat for a crash "
+                    "or disconnect.",
+                    method,
+                )
                 self.on_error(connection)
             else:
-                self._log.warning('The connection was killed during cleanup:')
+                self._log.warning("The connection was killed during cleanup:")
                 self._log.warning(e)
             raise Sl4aConnectionError(e)
         except socket.timeout as err:
             # If a socket connection has timed out, the socket can no longer be
             # used. Close it out and remove the socket from the connection pool.
             timed_out = True
-            self._log.warning('RPC "%s" (id: %s) timed out after %s seconds.',
-                              method, ticket, timeout or SOCKET_TIMEOUT)
-            self._log.debug(
-                'Closing timed out connection over %s' % connection.ports)
+            self._log.warning(
+                'RPC "%s" (id: %s) timed out after %s seconds.',
+                method,
+                ticket,
+                timeout or SOCKET_TIMEOUT,
+            )
+            self._log.debug("Closing timed out connection over %s" % connection.ports)
             connection.close()
             self._working_connections.remove(connection)
             # Re-raise the error as an SL4A Error so end users can process it.
@@ -300,26 +317,29 @@
                 if timeout:
                     connection.set_timeout(SOCKET_TIMEOUT)
                 self._release_working_connection(connection)
-        result = json.loads(str(response, encoding='utf8'))
+        result = json.loads(str(response, encoding="utf8"))
 
-        if result['error']:
-            error_object = result['error']
+        if result["error"]:
+            error_object = result["error"]
             if isinstance(error_object, dict):
                 # Uses JSON-RPC 2.0 Format
-                sl4a_api_error = Sl4aApiError(error_object.get('message', None),
-                                              error_object.get('code', -1),
-                                              error_object.get('data', {}),
-                                              rpc_name=method)
+                sl4a_api_error = Sl4aApiError(
+                    error_object.get("message", None),
+                    error_object.get("code", -1),
+                    error_object.get("data", {}),
+                    rpc_name=method,
+                )
             else:
                 # Fallback on JSON-RPC 1.0 Format
                 sl4a_api_error = Sl4aApiError(error_object, rpc_name=method)
             self._log.warning(sl4a_api_error)
             raise sl4a_api_error
-        if result['id'] != ticket:
-            self._log.error('RPC method %s with mismatched api id %s', method,
-                            result['id'])
+        if result["id"] != ticket:
+            self._log.error(
+                "RPC method %s with mismatched api id %s", method, result["id"]
+            )
             raise Sl4aProtocolError(Sl4aProtocolError.MISMATCHED_API_ID)
-        return result['result']
+        return result["result"]
 
     @property
     def future(self):
@@ -353,6 +373,7 @@
 
         if not self.is_alive:
             raise Sl4aStartError(
-                'This SL4A session has already been terminated. You must '
-                'create a new session to continue.')
+                "This SL4A session has already been terminated. You must "
+                "create a new session to continue."
+            )
         return rpc_call
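Illustrative sketch (not part of the patch): the JSON-RPC payload that rpc_client builds for each call, matching the request/response handling in the hunk above. The ticket number, method name, and reply bytes are illustrative values only.

    import json

    request = json.dumps({"id": 12, "method": "makeToast", "params": ["hello"]})
    # ... sent newline-terminated over the forwarded socket ...
    response = b'{"id": 12, "result": null, "error": null}'
    result = json.loads(str(response, encoding="utf8"))
    assert result["id"] == 12 and result["error"] is None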
diff --git a/src/antlion/controllers/sl4a_lib/rpc_connection.py b/src/antlion/controllers/sl4a_lib/rpc_connection.py
index b8cb8cf..6b9bf25 100644
--- a/src/antlion/controllers/sl4a_lib/rpc_connection.py
+++ b/src/antlion/controllers/sl4a_lib/rpc_connection.py
@@ -30,8 +30,9 @@
     INIT: Initializes a new session in SL4A.
     CONTINUE: Creates a connection.
     """
-    INIT = 'initiate'
-    CONTINUE = 'continue'
+
+    INIT = "initiate"
+    CONTINUE = "continue"
 
 
 class RpcConnection(object):
@@ -58,9 +59,12 @@
 
         def _log_formatter(message):
             """Defines the formatting used in the logger."""
-            return '[SL4A Client|%s|%s|%s] %s' % (self.adb.serial,
-                                                  self.ports.client_port,
-                                                  self.uid, message)
+            return "[SL4A Client|%s|%s|%s] %s" % (
+                self.adb.serial,
+                self.ports.client_port,
+                self.uid,
+                message,
+            )
 
         self.log = logger.create_logger(_log_formatter)
 
@@ -84,19 +88,19 @@
         try:
             resp = self._cmd(start_command)
         except socket.timeout as e:
-            self.log.error('Failed to open socket connection: %s', e)
+            self.log.error("Failed to open socket connection: %s", e)
             raise
         if not resp:
             raise rpc_client.Sl4aProtocolError(
-                rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE)
-        result = json.loads(str(resp, encoding='utf8'))
-        if result['status']:
-            self.uid = result['uid']
+                rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE
+            )
+        result = json.loads(str(resp, encoding="utf8"))
+        if result["status"]:
+            self.uid = result["uid"]
         else:
-            self.log.warning(
-                'UID not received for connection %s.' % self.ports)
+            self.log.warning("UID not received for connection %s." % self.ports)
             self.uid = UNKNOWN_UID
-        self.log.debug('Created connection over: %s.' % self.ports)
+        self.log.debug("Created connection over: %s." % self.ports)
 
     def _cmd(self, command):
         """Sends an session protocol command to SL4A to establish communication.
@@ -107,7 +111,7 @@
         Returns:
             The line that was written back.
         """
-        self.send_request(json.dumps({'cmd': command, 'uid': self.uid}))
+        self.send_request(json.dumps({"cmd": command, "uid": self.uid}))
         return self.get_response()
 
     def get_new_ticket(self):
@@ -123,14 +127,14 @@
 
     def send_request(self, request):
         """Sends a request over the connection."""
-        self._socket_file.write(request.encode('utf8') + b'\n')
+        self._socket_file.write(request.encode("utf8") + b"\n")
         self._socket_file.flush()
-        self.log.debug('Sent: ' + request)
+        self.log.debug("Sent: " + request)
 
     def get_response(self):
         """Returns the first response sent back to the client."""
         data = self._socket_file.readline()
-        self.log.debug('Received: ' + data.decode('utf8', errors='replace'))
+        self.log.debug("Received: " + data.decode("utf8", errors="replace"))
         return data
 
     def close(self):
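Illustrative sketch (not part of the patch): the newline-delimited handshake that RpcConnection.open() performs through _cmd()/send_request() above. The reply bytes are illustrative; a falsy "status" leaves the connection with UNKNOWN_UID (-1).

    import json

    handshake = json.dumps({"cmd": "initiate", "uid": -1}).encode("utf8") + b"\n"
    # ... written to the forwarded socket; SL4A answers on the same line protocol:
    reply = b'{"status": true, "uid": 1}'
    result = json.loads(str(reply, encoding="utf8"))
    uid = result["uid"] if result["status"] else -1  # -1 == UNKNOWN_UID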
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_manager.py b/src/antlion/controllers/sl4a_lib/sl4a_manager.py
index 8749702..f3d7047 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_manager.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_manager.py
@@ -22,25 +22,27 @@
 from antlion.controllers.sl4a_lib import sl4a_session
 from antlion.controllers.sl4a_lib import error_reporter
 
-ATTEMPT_INTERVAL = .25
+ATTEMPT_INTERVAL = 0.25
 MAX_WAIT_ON_SERVER_SECONDS = 5
 
-SL4A_PKG_NAME = 'com.googlecode.android_scripting'
+SL4A_PKG_NAME = "com.googlecode.android_scripting"
 
 _SL4A_LAUNCH_SERVER_CMD = (
-    'am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER '
-    '--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
+    "am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER "
+    "--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s "
+    "com.googlecode.android_scripting/.service.ScriptingLayerService"
+)
 
 _SL4A_CLOSE_SERVER_CMD = (
-    'am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS '
-    '--ei com.googlecode.android_scripting.extra.PROXY_PORT %s '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
+    "am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS "
+    "--ei com.googlecode.android_scripting.extra.PROXY_PORT %s "
+    "com.googlecode.android_scripting/.service.ScriptingLayerService"
+)
 
 # The command for finding SL4A's server port as root.
 _SL4A_ROOT_FIND_PORT_CMD = (
     # Get all open, listening ports, and their process names
-    'ss -l -p -n | '
+    "ss -l -p -n | "
     # Find all open TCP ports for SL4A
     'grep "tcp.*droid_scripting" | '
     # Shorten all whitespace to a single space character
@@ -48,12 +50,13 @@
     # Grab the 5th column (which is server:port)
     'cut -d " " -f 5 |'
     # Only grab the port
-    'sed s/.*://g')
+    "sed s/.*://g"
+)
 
 # The command for finding SL4A's server port without root.
 _SL4A_USER_FIND_PORT_CMD = (
     # Get all open, listening ports, and their process names
-    'ss -l -p -n | '
+    "ss -l -p -n | "
     # Find all open ports exposed to the public. This can produce false
     # positives since users cannot read the process associated with the port.
     'grep -e "tcp.*::ffff:127\.0\.0\.1:" | '
@@ -62,12 +65,13 @@
     # Grab the 5th column (which is server:port)
     'cut -d " " -f 5 |'
     # Only grab the port
-    'sed s/.*://g')
+    "sed s/.*://g"
+)
 
 # The command that begins the SL4A ScriptingLayerService.
 _SL4A_START_SERVICE_CMD = (
-    'am startservice '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
+    "am startservice " "com.googlecode.android_scripting/.service.ScriptingLayerService"
+)
 
 # Maps device serials to their SL4A Manager. This is done to prevent multiple
 # Sl4aManagers from existing for the same device.
@@ -82,8 +86,9 @@
     """
     if adb.serial in _all_sl4a_managers:
         _all_sl4a_managers[adb.serial].log.warning(
-            'Attempted to return multiple SL4AManagers on the same device. '
-            'Returning pre-existing SL4AManager instead.')
+            "Attempted to return multiple SL4AManagers on the same device. "
+            "Returning pre-existing SL4AManager instead."
+        )
         return _all_sl4a_managers[adb.serial]
     else:
         manager = Sl4aManager(adb)
@@ -111,12 +116,12 @@
         self._listen_for_port_lock = threading.Lock()
         self._sl4a_ports = set()
         self.adb = adb
-        self.log = logger.create_logger(lambda msg: '[SL4A Manager|%s] %s' % (
-            adb.serial, msg))
+        self.log = logger.create_logger(
+            lambda msg: "[SL4A Manager|%s] %s" % (adb.serial, msg)
+        )
         self.sessions = {}
         self._started = False
-        self.error_reporter = error_reporter.ErrorReporter('SL4A %s' %
-                                                           adb.serial)
+        self.error_reporter = error_reporter.ErrorReporter("SL4A %s" % adb.serial)
 
     @property
     def sl4a_ports_in_use(self):
@@ -162,9 +167,9 @@
                 return port
 
         raise rpc_client.Sl4aConnectionError(
-            'Unable to find a valid open port for a new server connection. '
-            'Expected port: %s. Open ports: %s' %
-            (device_port, self._sl4a_ports))
+            "Unable to find a valid open port for a new server connection. "
+            "Expected port: %s. Open ports: %s" % (device_port, self._sl4a_ports)
+        )
 
     def _get_all_ports_command(self):
         """Returns the list of all ports from the command to get ports."""
@@ -177,8 +182,10 @@
         else:
             # TODO(markdr): When root is unavailable, search logcat output for
             #               the port the server has opened.
-            self.log.warning('Device cannot be put into root mode. SL4A '
-                             'server connections cannot be verified.')
+            self.log.warning(
+                "Device cannot be put into root mode. SL4A "
+                "server connections cannot be verified."
+            )
             return _SL4A_USER_FIND_PORT_CMD
 
     def _get_all_ports(self):
@@ -190,7 +197,7 @@
         Will return none if no port is found.
         """
         possible_ports = self._get_all_ports()
-        self.log.debug('SL4A Ports found: %s' % possible_ports)
+        self.log.debug("SL4A Ports found: %s" % possible_ports)
 
         # Acquire the lock. We lock this method because if multiple threads
         # attempt to get a server at the same time, they can potentially find
@@ -204,8 +211,7 @@
 
     def is_sl4a_installed(self):
         """Returns True if SL4A is installed on the AndroidDevice."""
-        return bool(
-            self.adb.shell('pm path %s' % SL4A_PKG_NAME, ignore_status=True))
+        return bool(self.adb.shell("pm path %s" % SL4A_PKG_NAME, ignore_status=True))
 
     def start_sl4a_service(self):
         """Starts the SL4A Service on the device.
@@ -217,14 +223,14 @@
             self._started = True
             if not self.is_sl4a_installed():
                 raise rpc_client.Sl4aNotInstalledError(
-                    'SL4A is not installed on device %s' % self.adb.serial)
+                    "SL4A is not installed on device %s" % self.adb.serial
+                )
             if self.adb.shell('(ps | grep "S %s") || true' % SL4A_PKG_NAME):
                 # Close all SL4A servers not opened by this manager.
                 # TODO(markdr): revert back to closing all ports after
                 # b/76147680 is resolved.
-                self.adb.shell('kill -9 $(pidof %s)' % SL4A_PKG_NAME)
-            self.adb.shell(
-                'settings put global hidden_api_blacklist_exemptions "*"')
+                self.adb.shell("kill -9 $(pidof %s)" % SL4A_PKG_NAME)
+            self.adb.shell('settings put global hidden_api_blacklist_exemptions "*"')
             # Start the service if it is not up already.
             self.adb.shell(_SL4A_START_SERVICE_CMD)
 
@@ -239,11 +245,9 @@
         else:
             return server_port
 
-    def create_session(self,
-                       max_connections=None,
-                       client_port=0,
-                       forwarded_port=0,
-                       server_port=None):
+    def create_session(
+        self, max_connections=None, client_port=0, forwarded_port=0, server_port=None
+    ):
         """Creates an SL4A server with the given ports if possible.
 
         The ports are not guaranteed to be available for use. If the port
@@ -264,30 +268,32 @@
         if server_port is None:
             # If a session already exists, use the same server.
             if len(self.sessions) > 0:
-                server_port = self.sessions[sorted(
-                    self.sessions.keys())[0]].server_port
+                server_port = self.sessions[sorted(self.sessions.keys())[0]].server_port
             # Otherwise, open a new server on a random port.
             else:
                 server_port = 0
         self.log.debug(
-            "Creating SL4A session client_port={}, forwarded_port={}, server_port={}"
-            .format(client_port, forwarded_port, server_port))
+            "Creating SL4A session client_port={}, forwarded_port={}, server_port={}".format(
+                client_port, forwarded_port, server_port
+            )
+        )
         self.start_sl4a_service()
-        session = sl4a_session.Sl4aSession(self.adb,
-                                           client_port,
-                                           server_port,
-                                           self.obtain_sl4a_server,
-                                           self.diagnose_failure,
-                                           forwarded_port,
-                                           max_connections=max_connections)
+        session = sl4a_session.Sl4aSession(
+            self.adb,
+            client_port,
+            server_port,
+            self.obtain_sl4a_server,
+            self.diagnose_failure,
+            forwarded_port,
+            max_connections=max_connections,
+        )
         self.sessions[session.uid] = session
         return session
 
     def stop_service(self):
         """Stops The SL4A Service. Force-stops the SL4A apk."""
         try:
-            self.adb.shell('am force-stop %s' % SL4A_PKG_NAME,
-                           ignore_status=True)
+            self.adb.shell("am force-stop %s" % SL4A_PKG_NAME, ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e)
         self._started = False
@@ -312,6 +318,7 @@
 
         if time_left <= 0:
             self.log.warning(
-                'Unable to close all un-managed servers! Server ports that are '
-                'still open are %s' % self._get_open_listening_port())
+                "Unable to close all un-managed servers! Server ports that are "
+                "still open are %s" % self._get_open_listening_port()
+            )
         self._sl4a_ports = set()
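Illustrative sketch (not part of the patch): driving the reformatted Sl4aManager API above. Here manager is assumed to be an Sl4aManager bound to one device; create_session() reuses the existing server port when a session is already open, otherwise it starts the service on a random port.

    session = manager.create_session()
    droid = session.rpc_client   # JSON-RPC proxy, see rpc_client.py above
    # ... run RPCs against the device ...
    manager.stop_service()       # force-stops the SL4A apk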
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_ports.py b/src/antlion/controllers/sl4a_lib/sl4a_ports.py
index d0172cc..db9917e 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_ports.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_ports.py
@@ -30,5 +30,8 @@
         self.server_port = server_port
 
     def __str__(self):
-        return '(%s, %s, %s)' % (self.client_port, self.forwarded_port,
-                                 self.server_port)
+        return "(%s, %s, %s)" % (
+            self.client_port,
+            self.forwarded_port,
+            self.server_port,
+        )
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_session.py b/src/antlion/controllers/sl4a_lib/sl4a_session.py
index e84def2..27edc48 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_session.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_session.py
@@ -49,14 +49,16 @@
             value is only unique during the lifetime of the SL4A apk.
     """
 
-    def __init__(self,
-                 adb,
-                 host_port,
-                 device_port,
-                 get_server_port_func,
-                 on_error_callback,
-                 forwarded_port=0,
-                 max_connections=None):
+    def __init__(
+        self,
+        adb,
+        host_port,
+        device_port,
+        get_server_port_func,
+        on_error_callback,
+        forwarded_port=0,
+        max_connections=None,
+    ):
         """Creates an SL4A Session.
 
         Args:
@@ -77,8 +79,7 @@
         self.adb = adb
 
         def _log_formatter(message):
-            return '[SL4A Session|%s|%s] %s' % (self.adb.serial, self.uid,
-                                                message)
+            return "[SL4A Session|%s|%s] %s" % (self.adb.serial, self.uid, message)
 
         self.log = logger.create_logger(_log_formatter)
 
@@ -89,17 +90,22 @@
         self._on_error_callback = on_error_callback
 
         connection_creator = self._rpc_connection_creator(host_port)
-        self.rpc_client = rpc_client.RpcClient(self.uid,
-                                               self.adb.serial,
-                                               self.diagnose_failure,
-                                               connection_creator,
-                                               max_connections=max_connections)
+        self.rpc_client = rpc_client.RpcClient(
+            self.uid,
+            self.adb.serial,
+            self.diagnose_failure,
+            connection_creator,
+            max_connections=max_connections,
+        )
 
     def _rpc_connection_creator(self, host_port):
         def create_client(uid):
-            return self._create_rpc_connection(ports=sl4a_ports.Sl4aPorts(
-                host_port, self.forwarded_port, self.server_port),
-                                               uid=uid)
+            return self._create_rpc_connection(
+                ports=sl4a_ports.Sl4aPorts(
+                    host_port, self.forwarded_port, self.server_port
+                ),
+                uid=uid,
+            )
 
         return create_client
 
@@ -121,19 +127,21 @@
         """
         if self.adb.get_version_number() < 37 and hinted_port == 0:
             self.log.error(
-                'The current version of ADB does not automatically provide a '
-                'port to forward. Please upgrade ADB to version 1.0.37 or '
-                'higher.')
-            raise Sl4aStartError('Unable to forward a port to the device.')
+                "The current version of ADB does not automatically provide a "
+                "port to forward. Please upgrade ADB to version 1.0.37 or "
+                "higher."
+            )
+            raise Sl4aStartError("Unable to forward a port to the device.")
         else:
             try:
                 return self.adb.tcp_forward(hinted_port, server_port)
             except AdbError as e:
-                if 'cannot bind listener' in e.stderr:
+                if "cannot bind listener" in e.stderr:
                     self.log.warning(
-                        'Unable to use %s to forward to device port %s due to: '
-                        '"%s". Attempting to choose a random port instead.' %
-                        (hinted_port, server_port, e.stderr))
+                        "Unable to use %s to forward to device port %s due to: "
+                        '"%s". Attempting to choose a random port instead.'
+                        % (hinted_port, server_port, e.stderr)
+                    )
                     # Call this method again, but this time with no hinted port.
                     return self._create_forwarded_port(server_port)
                 raise e
@@ -160,13 +168,12 @@
         self.server_port = ports.server_port
         # Forward the device port to the host.
         ports.forwarded_port = self._create_forwarded_port(
-            ports.server_port, hinted_port=ports.forwarded_port)
+            ports.server_port, hinted_port=ports.forwarded_port
+        )
         client_socket, fd = self._create_client_side_connection(ports)
-        client = rpc_connection.RpcConnection(self.adb,
-                                              ports,
-                                              client_socket,
-                                              fd,
-                                              uid=uid)
+        client = rpc_connection.RpcConnection(
+            self.adb, ports, client_socket, fd, uid=uid
+        )
         client.open()
         if uid == UNKNOWN_UID:
             self.uid = client.uid
@@ -180,7 +187,8 @@
         """Returns the EventDispatcher for this Sl4aSession."""
         if self._event_dispatcher is None:
             self._event_dispatcher = event_dispatcher.EventDispatcher(
-                self.adb.serial, self.rpc_client)
+                self.adb.serial, self.rpc_client
+            )
         return self._event_dispatcher
 
     def _create_client_side_connection(self, ports):
@@ -202,9 +210,10 @@
             except OSError as e:
                 # If the port is in use, log and ask for any open port.
                 if e.errno == errno.EADDRINUSE:
-                    self.log.warning('Port %s is already in use on the host. '
-                                     'Generating a random port.' %
-                                     ports.client_port)
+                    self.log.warning(
+                        "Port %s is already in use on the host. "
+                        "Generating a random port." % ports.client_port
+                    )
                     ports.client_port = 0
                     return self._create_client_side_connection(ports)
                 raise
@@ -212,11 +221,12 @@
         # Verify and obtain the port opened by SL4A.
         try:
             # Connect to the port that has been forwarded to the device.
-            client_socket.connect(('127.0.0.1', ports.forwarded_port))
+            client_socket.connect(("127.0.0.1", ports.forwarded_port))
         except socket.timeout:
             raise rpc_client.Sl4aConnectionError(
-                'SL4A has not connected over the specified port within the '
-                'timeout of %s seconds.' % SOCKET_TIMEOUT)
+                "SL4A has not connected over the specified port within the "
+                "timeout of %s seconds." % SOCKET_TIMEOUT
+            )
         except socket.error as e:
             # In extreme, unlikely cases, a socket error with
             # errno.EADDRNOTAVAIL can be raised when a desired host_port is
@@ -228,7 +238,7 @@
                 return self._create_client_side_connection(ports)
             raise
         ports.client_port = client_socket.getsockname()[1]
-        return client_socket, client_socket.makefile(mode='brw')
+        return client_socket, client_socket.makefile(mode="brw")
 
     def terminate(self):
         """Terminates the session.
@@ -238,12 +248,11 @@
         """
         with self._terminate_lock:
             if not self._terminated:
-                self.log.debug('Terminating Session.')
+                self.log.debug("Terminating Session.")
                 try:
                     self.rpc_client.closeSl4aSession()
                 except Exception as e:
-                    if "SL4A session has already been terminated" not in str(
-                            e):
+                    if "SL4A session has already been terminated" not in str(e):
                         self.log.warning(e)
                 # Must be set after closeSl4aSession so the rpc_client does not
                 # think the session has closed.
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_types.py b/src/antlion/controllers/sl4a_lib/sl4a_types.py
index 53ea0f0..434ff92 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_types.py
+++ b/src/antlion/controllers/sl4a_lib/sl4a_types.py
@@ -31,16 +31,18 @@
 class Sl4aNetworkInfo(DictObject):
     """SL4A equivalent of an Android NetworkInfo Object"""
 
-    def __init__(self,
-                 isAvailable=None,
-                 isConnected=None,
-                 isFailover=None,
-                 isRoaming=None,
-                 ExtraInfo=None,
-                 FailedReason=None,
-                 TypeName=None,
-                 SubtypeName=None,
-                 State=None):
+    def __init__(
+        self,
+        isAvailable=None,
+        isConnected=None,
+        isFailover=None,
+        isRoaming=None,
+        ExtraInfo=None,
+        FailedReason=None,
+        TypeName=None,
+        SubtypeName=None,
+        State=None,
+    ):
         DictObject.__init__(
             self,
             isAvailable=isAvailable,
@@ -51,4 +53,5 @@
             FailedReason=FailedReason,
             TypeName=TypeName,
             SubtypeName=SubtypeName,
-            State=State)
+            State=State,
+        )
diff --git a/src/antlion/controllers/sniffer.py b/src/antlion/controllers/sniffer.py
index 88b43d9..e87a547 100644
--- a/src/antlion/controllers/sniffer.py
+++ b/src/antlion/controllers/sniffer.py
@@ -41,18 +41,17 @@
         interface = c["Interface"]
         base_configs = c["BaseConfigs"]
         module_name = "antlion.controllers.sniffer_lib.{}.{}".format(
-            sniffer_type, sniffer_subtype)
+            sniffer_type, sniffer_subtype
+        )
         module = importlib.import_module(module_name)
         objs.append(
-            module.Sniffer(interface,
-                           logging.getLogger(),
-                           base_configs=base_configs))
+            module.Sniffer(interface, logging.getLogger(), base_configs=base_configs)
+        )
     return objs
 
 
 def destroy(objs):
-    """Destroys the sniffers and terminates any ongoing capture sessions.
-    """
+    """Destroys the sniffers and terminates any ongoing capture sessions."""
     for sniffer in objs:
         try:
             sniffer.stop_capture()
@@ -167,11 +166,13 @@
         """
         raise NotImplementedError("Base class should not be called directly!")
 
-    def start_capture(self,
-                      override_configs=None,
-                      additional_args=None,
-                      duration=None,
-                      packet_count=None):
+    def start_capture(
+        self,
+        override_configs=None,
+        additional_args=None,
+        duration=None,
+        packet_count=None,
+    ):
         """This function starts a capture which is saved to the specified file
         path.
 
diff --git a/src/antlion/controllers/sniffer_lib/local/local_base.py b/src/antlion/controllers/sniffer_lib/local/local_base.py
index 5a6af09..8873350 100644
--- a/src/antlion/controllers/sniffer_lib/local/local_base.py
+++ b/src/antlion/controllers/sniffer_lib/local/local_base.py
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """
 Class for Local sniffers - i.e. running on the local machine.
 
@@ -30,6 +29,7 @@
 from antlion import utils
 from antlion.controllers import sniffer
 
+
 class SnifferLocalBase(sniffer.Sniffer):
     """This class defines the common behaviors of WLAN sniffers running on
     WLAN interfaces of the local machine.
@@ -39,8 +39,7 @@
     """
 
     def __init__(self, interface, logger, base_configs=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         self._base_configs = None
         self._capture_file_path = ""
         self._interface = ""
@@ -61,13 +60,11 @@
             raise sniffer.ExecutionError(err)
 
     def get_interface(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return self._interface
 
     def get_type(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "local"
 
     def get_capture_file(self):
@@ -87,13 +84,16 @@
 
         if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
             try:
-                utils.exe_cmd("iwconfig", self._interface, "channel",
-                        str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]))
+                utils.exe_cmd(
+                    "iwconfig",
+                    self._interface,
+                    "channel",
+                    str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]),
+                )
             except Exception as err:
                 raise sniffer.ExecutionError(err)
 
-    def _get_command_line(self, additional_args=None, duration=None,
-                          packet_count=None):
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
         """Utility function to be implemented by every child class - which
         are the concrete sniffer classes. Each sniffer-specific class should
         derive the command line to execute its sniffer based on the specified
@@ -108,44 +108,51 @@
         self._process = None
         shutil.move(self._temp_capture_file_path, self._capture_file_path)
 
-    def start_capture(self, override_configs=None,
-                      additional_args=None, duration=None,
-                      packet_count=None):
-        """See base class documentation
-        """
+    def start_capture(
+        self,
+        override_configs=None,
+        additional_args=None,
+        duration=None,
+        packet_count=None,
+    ):
+        """See base class documentation"""
         if self._process is not None:
             raise sniffer.InvalidOperationError(
-                    "Trying to start a sniff while another is still running!")
-        capture_dir = os.path.join(self._logger.log_path,
-                                   "Sniffer-{}".format(self._interface))
+                "Trying to start a sniff while another is still running!"
+            )
+        capture_dir = os.path.join(
+            self._logger.log_path, "Sniffer-{}".format(self._interface)
+        )
         os.makedirs(capture_dir, exist_ok=True)
-        self._capture_file_path = os.path.join(capture_dir,
-                      "capture_{}.pcap".format(logger.get_log_file_timestamp()))
+        self._capture_file_path = os.path.join(
+            capture_dir, "capture_{}.pcap".format(logger.get_log_file_timestamp())
+        )
 
         self._pre_capture_config(override_configs)
         _, self._temp_capture_file_path = tempfile.mkstemp(suffix=".pcap")
 
-        cmd = self._get_command_line(additional_args=additional_args,
-                                duration=duration, packet_count=packet_count)
+        cmd = self._get_command_line(
+            additional_args=additional_args,
+            duration=duration,
+            packet_count=packet_count,
+        )
 
         self._process = utils.start_standing_subprocess(cmd)
         return sniffer.ActiveCaptureContext(self, duration)
 
     def stop_capture(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         if self._process is None:
-            raise sniffer.InvalidOperationError(
-                                      "Trying to stop a non-started process")
+            raise sniffer.InvalidOperationError("Trying to stop a non-started process")
         utils.stop_standing_subprocess(self._process, kill_signal=signal.SIGINT)
         self._post_process()
 
     def wait_for_capture(self, timeout=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         if self._process is None:
             raise sniffer.InvalidOperationError(
-                                  "Trying to wait on a non-started process")
+                "Trying to wait on a non-started process"
+            )
         try:
             utils.wait_for_standing_subprocess(self._process, timeout)
             self._post_process()
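Illustrative sketch (not part of the patch): how the start_capture()/stop_capture() pair reformatted above is used. Here wlan_sniffer is assumed to be a configured local Sniffer instance; the bound on the capture comes from packet_count, and start_capture() returns an ActiveCaptureContext so the capture can also be scoped with a with block.

    wlan_sniffer.start_capture(duration=30, packet_count=1000)
    # ... generate test traffic while the temporary pcap is written ...
    wlan_sniffer.wait_for_capture(timeout=60)
    print(wlan_sniffer.get_capture_file())  # Sniffer-<iface>/capture_<timestamp>.pcap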
diff --git a/src/antlion/controllers/sniffer_lib/local/tcpdump.py b/src/antlion/controllers/sniffer_lib/local/tcpdump.py
index 39f8720..85622dc 100644
--- a/src/antlion/controllers/sniffer_lib/local/tcpdump.py
+++ b/src/antlion/controllers/sniffer_lib/local/tcpdump.py
@@ -20,38 +20,32 @@
 
 
 class Sniffer(local_base.SnifferLocalBase):
-    """This class defines a sniffer which uses tcpdump as its back-end
-    """
+    """This class defines a sniffer which uses tcpdump as its back-end"""
 
     def __init__(self, config_path, logger, base_configs=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         self._executable_path = None
 
-        super(local_base.SnifferLocalBase).__init__(
-            config_path, logger, base_configs=base_configs)
+        super().__init__(config_path, logger, base_configs=base_configs)
 
         self._executable_path = shutil.which("tcpdump")
         if self._executable_path is None:
-            raise sniffer.SnifferError(
-                "Cannot find a path to the 'tcpdump' executable")
+            raise sniffer.SnifferError("Cannot find a path to the 'tcpdump' executable")
 
     def get_descriptor(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "local-tcpdump-{}".format(self._interface)
 
     def get_subtype(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "tcpdump"
 
-    def _get_command_line(self,
-                          additional_args=None,
-                          duration=None,
-                          packet_count=None):
-        cmd = "{} -i {} -w {}".format(self._executable_path, self._interface,
-                                      self._temp_capture_file_path)
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        cmd = "{} -i {} -w {}".format(
+            self._executable_path, self._interface, self._temp_capture_file_path
+        )
         if packet_count is not None:
             cmd = "{} -c {}".format(cmd, packet_count)
         if additional_args is not None:
diff --git a/src/antlion/controllers/sniffer_lib/local/tshark.py b/src/antlion/controllers/sniffer_lib/local/tshark.py
index b95aa7d..dd79eed 100644
--- a/src/antlion/controllers/sniffer_lib/local/tshark.py
+++ b/src/antlion/controllers/sniffer_lib/local/tshark.py
@@ -18,37 +18,37 @@
 from antlion.controllers import sniffer
 from antlion.controllers.sniffer_lib.local import local_base
 
+
 class Sniffer(local_base.SnifferLocalBase):
-    """This class defines a sniffer which uses tshark as its back-end
-    """
+    """This class defines a sniffer which uses tshark as its back-end"""
 
     def __init__(self, config_path, logger, base_configs=None):
-        """See base class documentation
-        """
+        """See base class documentation"""
         self._executable_path = None
 
         super().__init__(config_path, logger, base_configs=base_configs)
 
-        self._executable_path = (shutil.which("tshark")
-                                 or shutil.which("/usr/local/bin/tshark"))
+        self._executable_path = shutil.which("tshark") or shutil.which(
+            "/usr/local/bin/tshark"
+        )
         if self._executable_path is None:
-            raise sniffer.SnifferError("Cannot find a path to the 'tshark' "
-                                 "executable (or to '/usr/local/bin/tshark')")
+            raise sniffer.SnifferError(
+                "Cannot find a path to the 'tshark' "
+                "executable (or to '/usr/local/bin/tshark')"
+            )
 
     def get_descriptor(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "local-tshark-{}-ch{}".format(self._interface)
 
     def get_subtype(self):
-        """See base class documentation
-        """
+        """See base class documentation"""
         return "tshark"
 
-    def _get_command_line(self, additional_args=None, duration=None,
-                          packet_count=None):
-        cmd = "{} -i {} -w {}".format(self._executable_path, self._interface,
-                                      self._temp_capture_file_path)
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        cmd = "{} -i {} -w {}".format(
+            self._executable_path, self._interface, self._temp_capture_file_path
+        )
         if duration is not None:
             cmd = "{} -a duration:{}".format(cmd, duration)
         if packet_count is not None:
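Illustrative sketch (not part of the patch): the command string that _get_command_line() above assembles for a bounded tshark capture. The executable path, interface name, and temporary pcap path are illustrative values.

    cmd = "{} -i {} -w {}".format("/usr/bin/tshark", "wlan0", "/tmp/tmp1234.pcap")
    cmd = "{} -a duration:{}".format(cmd, 60)
    cmd = "{} -c {}".format(cmd, 1000)
    # -> "/usr/bin/tshark -i wlan0 -w /tmp/tmp1234.pcap -a duration:60 -c 1000"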
diff --git a/src/antlion/controllers/spectracom_lib/__init__.py b/src/antlion/controllers/spectracom_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/spectracom_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/spectracom_lib/gsg6.py b/src/antlion/controllers/spectracom_lib/gsg6.py
deleted file mode 100644
index e3d0896..0000000
--- a/src/antlion/controllers/spectracom_lib/gsg6.py
+++ /dev/null
@@ -1,219 +0,0 @@
-"""Python module for Spectracom/Orolia GSG-6 GNSS simulator."""
-
-from antlion.controllers import abstract_inst
-
-
-class GSG6Error(abstract_inst.SocketInstrumentError):
-    """GSG-6 Instrument Error Class."""
-
-
-class GSG6(abstract_inst.SocketInstrument):
-    """GSG-6 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for GSG-6.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super(GSG6, self).__init__(ip_addr, ip_port)
-
-        self.idn = ''
-
-    def connect(self):
-        """Init and Connect to GSG-6."""
-        self._connect_socket()
-
-        self.get_idn()
-
-        infmsg = 'Connected to GSG-6, with ID: {}'.format(self.idn)
-        self._logger.debug(infmsg)
-
-    def close(self):
-        """Close GSG-6."""
-        self._close_socket()
-
-        self._logger.debug('Closed connection to GSG-6')
-
-    def get_idn(self):
-        """Get the Idenification of GSG-6.
-
-        Returns:
-            GSG-6 Identifier
-        """
-        self.idn = self._query('*IDN?')
-
-        return self.idn
-
-    def start_scenario(self, scenario=''):
-        """Start to run scenario.
-
-        Args:
-            scenario: Scenario to run.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = 'SOUR:SCEN:LOAD ' + scenario
-            self._send(cmd)
-
-        self._send('SOUR:SCEN:CONT START')
-
-        if scenario:
-            infmsg = 'Started running scenario {}'.format(scenario)
-        else:
-            infmsg = 'Started running current scenario'
-
-        self._logger.debug(infmsg)
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-
-        self._send('SOUR:SCEN:CONT STOP')
-
-        self._logger.debug('Stopped running scenario')
-
-    def preset(self):
-        """Preset GSG-6 to default status."""
-        self._send('*RST')
-
-        self._logger.debug('Reset GSG-6')
-
-    def set_power(self, power_level):
-        """set GSG-6 transmit power on all bands.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSG6Error: raise when power level is not in [-160, -65] range.
-        """
-        if not -160 <= power_level <= -65:
-            errmsg = ('"power_level" must be within [-160, -65], '
-                      'current input is {}').format(str(power_level))
-            raise GSG6Error(error=errmsg, command='set_power')
-
-        self._send(':SOUR:POW ' + str(round(power_level, 1)))
-
-        infmsg = 'Set GSG-6 transmit power to "{}"'.format(round(
-            power_level, 1))
-        self._logger.debug(infmsg)
-
-    def get_nmealog(self):
-        """Get GSG6 NMEA data.
-
-        Returns:
-            GSG6's NMEA data
-        """
-        nmea_data = self._query('SOUR:SCEN:LOG?')
-
-        return nmea_data
-
-    def toggle_scenario_power(self,
-                              toggle_onoff='ON',
-                              sat_id='',
-                              sat_system=''):
-        """Toggle ON OFF scenario.
-
-        Args:
-            toggle_onoff: turn on or off the satellites
-                Type, str. Option ON/OFF
-                Default, 'ON'
-            sat_id: satellite identifiers
-                Type, str.
-                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers no.
-                e.g.: G10
-            sat_system: to toggle On/OFF for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-        Raises:
-            GSG6Error: raise when toggle is not set.
-        """
-        if not sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(toggle_onoff))
-            infmsg = 'Set GSG-6 Power to "{}"'.format(toggle_onoff)
-            self._logger.debug(infmsg)
-
-        elif sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
-                       str(toggle_onoff))
-            infmsg = ('Set GSG-6 Power to "{}" for "{}" satellite '
-                      'identifiers').format(toggle_onoff, sat_id)
-            self._logger.debug(infmsg)
-
-        elif not sat_id and sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
-                       str(toggle_onoff))
-            infmsg = 'Set GSG-6 Power to "{}" for "{}" satellite system'.format(
-                toggle_onoff, sat_system)
-            self._logger.debug(infmsg)
-
-        else:
-            errmsg = ('"toggle power" must have either of these value [ON/OFF],'
-                      ' current input is {}').format(str(toggle_onoff))
-            raise GSG6Error(error=errmsg, command='toggle_scenario_power')
-
-    def set_scenario_power(self,
-                           power_level,
-                           sat_id='',
-                           sat_system='',
-                           freq_band=''):
-        """Set dynamic power for the running scenario.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-            sat_id: set power level for specific satellite identifiers
-                Type, str. Option
-                'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers number
-                e.g.: G10
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option  [L1, L2, L5, ALL]
-                Default, '', assumed to be L1.
-        Raises:
-            GSG6Error: raised when power level is not in the [-160, -65]
-                range (or in [-100, 100] when freq_band is 'ALL').
-        """
-        if freq_band == 'ALL':
-            if not -100 <= power_level <= 100:
-                errmsg = ('"power_level" must be within [-100, 100], for '
-                          '"freq_band"="ALL", current input is {}').format(
-                              str(power_level))
-                raise GSG6Error(error=errmsg, command='set_scenario_power')
-        else:
-            if not -160 <= power_level <= -65:
-                errmsg = ('"power_level" must be within [-160, -65], for '
-                          '"freq_band" != "ALL", current input is {}').format(
-                              str(power_level))
-                raise GSG6Error(error=errmsg, command='set_scenario_power')
-
-        if sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
-                       str(round(power_level, 1)) + ',' + str(freq_band))
-            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
-                      'satellite id').format(round(power_level, 1), sat_id)
-            self._logger.debug(infmsg)
-
-        elif not sat_id and sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
-                       str(round(power_level, 1)) + ',' + str(freq_band))
-            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
-                      'satellite system').format(round(power_level, 1),
-                                                 sat_system)
-            self._logger.debug(infmsg)
-
-        else:
-            errmsg = ('Exactly one of sat_id or sat_system must be set, current '
-                      'input is sat_id={} and sat_system={}').format(sat_id, sat_system)
-            raise GSG6Error(error=errmsg, command='set_scenario_power')
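For reference, the deleted GSG-6 helpers above were driven as a short sequence of SCPI-style calls. The sketch below assumes `gsg6` is an already-connected instance of the GSG-6 controller class being removed; only the method names come from the deleted code, the values are illustrative.

    # Hypothetical usage of the removed GSG-6 controller; `gsg6` is assumed to
    # be a connected controller instance.
    gsg6.preset()                                         # *RST back to defaults
    gsg6.set_power(-130.0)                                # static power, [-160, -65] dBm
    gsg6.toggle_scenario_power('OFF', sat_system='GLO')   # mute all GLONASS satellites
    gsg6.set_scenario_power(-128.5, sat_id='G10', freq_band='L1')
    nmea = gsg6.get_nmealog()                             # simulated NMEA stream
    gsg6.stop_scenario()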
diff --git a/src/antlion/controllers/spirent_lib/__init__.py b/src/antlion/controllers/spirent_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/spirent_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/spirent_lib/gss6450.py b/src/antlion/controllers/spirent_lib/gss6450.py
deleted file mode 100644
index 3fd1191..0000000
--- a/src/antlion/controllers/spirent_lib/gss6450.py
+++ /dev/null
@@ -1,381 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Spirent GSS6450 GNSS RPS."""
-
-import datetime
-import numbers
-from antlion.controllers import abstract_inst
-
-
-class GSS6450Error(abstract_inst.SocketInstrumentError):
-    """GSS6450 Instrument Error Class."""
-
-
-class GSS6450(abstract_inst.RequestInstrument):
-    """GSS6450 Class, inherted from abstract_inst RequestInstrument."""
-
-    def __init__(self, ip_addr):
-        """Init method for GSS6450.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-        """
-        super(GSS6450, self).__init__(ip_addr)
-
-        self.idn = 'Spirent-GSS6450'
-
-    def _put(self, cmd):
-        """Send put command via GSS6450 HTTP Request and get response.
-
-        Args:
-            cmd: parameters listed in SHM_PUT.
-                Type, Str.
-
-        Returns:
-            resp: Response from the _query method.
-                Type, Str.
-        """
-        put_cmd = 'shm_put.shtml?' + cmd
-        resp = self._query(put_cmd)
-
-        return resp
-
-    def _get(self, cmd):
-        """Send get command via GSS6450 HTTP Request and get response.
-
-        Args:
-            cmd: parameters listed in SHM_GET.
-                Type, Str.
-
-        Returns:
-          resp: Response from the _query method.
-              Type, Str.
-        """
-        get_cmd = 'shm_get.shtml?' + cmd
-        resp = self._query(get_cmd)
-
-        return resp
-
-    def get_scenario_filename(self):
-        """Get the scenario filename of GSS6450.
-
-        Returns:
-            filename: RPS Scenario file name.
-                Type, Str.
-        """
-        resp_raw = self._get('-f')
-        filename = resp_raw.split(':')[-1].strip(' ')
-        self._logger.debug('Got scenario file name: "%s".', filename)
-
-        return filename
-
-    def get_scenario_description(self):
-        """Get the scenario description of GSS6450.
-
-        Returns:
-            description: RPS Scenario description.
-                Type, Str.
-        """
-        resp_raw = self._get('-d')
-        description = resp_raw.split('-d')[-1].strip(' ')
-
-        if description:
-            self._logger.debug('Got scenario description: "%s".', description)
-        else:
-            self._logger.warning('Got scenario description with empty string.')
-
-        return description
-
-    def get_scenario_location(self):
-        """Get the scenario location of GSS6450.
-
-        Returns:
-            location: RPS Scenario location.
-                Type, Str.
-        """
-        resp_raw = self._get('-i')
-        location = resp_raw.split('-i')[-1].strip(' ')
-
-        if location:
-            self._logger.debug('Got scenario location: "%s".', location)
-        else:
-            self._logger.warning('Got scenario location with empty string.')
-
-        return location
-
-    def get_operation_mode(self):
-        """Get the operation mode of GSS6450.
-
-        Returns:
-            mode: RPS Operation Mode.
-                Type, Str.
-                Option, STOPPED/PLAYING/RECORDING
-        """
-        resp_raw = self._get('-m')
-        mode = resp_raw.split('-m')[-1].strip(' ')
-        self._logger.debug('Got operation mode: "%s".', mode)
-
-        return mode
-
-    def get_battery_level(self):
-        """Get the battery level of GSS6450.
-
-        Returns:
-            batterylevel: RPS Battery Level.
-                Type, float.
-        """
-        resp_raw = self._get('-l')
-        batterylevel = float(resp_raw.split('-l')[-1].strip(' '))
-        self._logger.debug('Got battery level: %s%%.', batterylevel)
-
-        return batterylevel
-
-    def get_rfport_voltage(self):
-        """Get the RF port voltage of GSS6450.
-
-        Returns:
-            voltageout: RPS RF port voltage.
-                Type, str
-        """
-        resp_raw = self._get('-v')
-        voltageout = resp_raw.split('-v')[-1].strip(' ')
-        self._logger.debug('Got RF port voltage: "%s".', voltageout)
-
-        return voltageout
-
-    def get_storage_media(self):
-        """Get the storage media of GSS6450.
-
-        Returns:
-            media: RPS storage.
-                Type, str
-
-        Raises:
-            GSS6450Error: raised when the request response is not supported.
-        """
-        resp_raw = self._get('-M')
-        resp_num = resp_raw.split('-M')[-1].strip(' ')
-
-        if resp_num == '1':
-            media = '1-INTERNAL'
-        elif resp_num == '2':
-            media = '2-REMOVABLE'
-        else:
-            errmsg = ('"{}" is not recognized as GSS6450 valid storage media'
-                      ' type'.format(resp_num))
-            raise GSS6450Error(error=errmsg, command='get_storage_media')
-
-        self._logger.debug('Got current storage media: %s.', media)
-
-        return media
-
-    def get_attenuation(self):
-        """Get the attenuation of GSS6450.
-
-        Returns:
-            attenuation: RPS attenuation level, in dB.
-                Type, list of float.
-        """
-        resp_raw = self._get('-a')
-        resp_str = resp_raw.split('-a')[-1].strip(' ')
-        self._logger.debug('Got attenuation: %s dB.', resp_str)
-        attenuation = [float(itm) for itm in resp_str.split(',')]
-
-        return attenuation
-
-    def get_elapsed_time(self):
-        """Get the running scenario elapsed time of GSS6450.
-
-        Returns:
-            etime: RPS elapsed time.
-                Type, datetime.timedelta.
-        """
-        resp_raw = self._get('-e')
-        resp_str = resp_raw.split('-e')[-1].strip(' ')
-        self._logger.debug('Got scenario elapsed time: "%s".', resp_str)
-        etime_tmp = datetime.datetime.strptime(resp_str, '%H:%M:%S')
-        etime = datetime.timedelta(hours=etime_tmp.hour,
-                                   minutes=etime_tmp.minute,
-                                   seconds=etime_tmp.second)
-
-        return etime
-
-    def get_playback_offset(self):
-        """Get the running scenario playback offset of GSS6450.
-
-        Returns:
-            offset: RPS playback offset.
-                Type, datetime.timedelta.
-        """
-        resp_raw = self._get('-o')
-        offset_tmp = float(resp_raw.split('-o')[-1].strip(' '))
-        self._logger.debug('Got scenario playback offset: %s sec.', offset_tmp)
-        offset = datetime.timedelta(seconds=offset_tmp)
-
-        return offset
-
-    def play_scenario(self, scenario=''):
-        """Start to play scenario in GSS6450.
-
-        Args:
-            scenario: Scenario to play.
-                Type, str.
-                Default, '', which will run the currently selected one.
-        """
-        if scenario:
-            cmd = '-f{},-wP'.format(scenario)
-        else:
-            cmd = '-wP'
-
-        _ = self._put(cmd)
-
-        if scenario:
-            infmsg = 'Started playing scenario: "{}".'.format(scenario)
-        else:
-            infmsg = 'Started playing current scenario.'
-
-        self._logger.debug(infmsg)
-
-    def record_scenario(self, scenario=''):
-        """Start to record scenario in GSS6450.
-
-        Args:
-            scenario: Scenario to record.
-                Type, str.
-                Default, '', which will run the currently selected one.
-        """
-        if scenario:
-            cmd = '-f{},-wR'.format(scenario)
-        else:
-            cmd = '-wR'
-
-        _ = self._put(cmd)
-
-        if scenario:
-            infmsg = 'Started recording scenario: "{}".'.format(scenario)
-        else:
-            infmsg = 'Started recording scenario.'
-
-        self._logger.debug(infmsg)
-
-    def stop_scenario(self):
-        """Start to stop playing/recording scenario in GSS6450."""
-        _ = self._put('-wS')
-
-        self._logger.debug('Stopped playing/recording scanrio.')
-
-    def set_rfport_voltage(self, voltageout):
-        """Set the RF port voltage of GSS6450.
-
-        Args:
-            voltageout: RPS RF port voltage.
-                Type, str
-
-        Raises:
-            GSS6450Error: raise when voltageout input is not valid.
-        """
-        if voltageout == 'OFF':
-            voltage_cmd = '0'
-        elif voltageout == '3.3V':
-            voltage_cmd = '3'
-        elif voltageout == '5V':
-            voltage_cmd = '5'
-        else:
-            errmsg = ('"{}" is not recognized as GSS6450 valid RF port voltage'
-                      ' type'.format(voltageout))
-            raise GSS6450Error(error=errmsg, command='set_rfport_voltage')
-
-        _ = self._put('-v{},-wV'.format(voltage_cmd))
-        self._logger.debug('Set RF port voltage: "%s".', voltageout)
-
-    def set_attenuation(self, attenuation):
-        """Set the attenuation of GSS6450.
-
-        Args:
-            attenuation: RPS attenuation level, in dB.
-                Type, numerical.
-
-        Raises:
-            GSS6450Error: raise when attenuation is not in range.
-        """
-        if not 0 <= attenuation <= 31:
-            errmsg = ('"attenuation" must be within [0, 31], '
-                      'current input is {}').format(str(attenuation))
-            raise GSS6450Error(error=errmsg, command='set_attenuation')
-
-        attenuation_raw = round(attenuation)
-
-        if attenuation_raw != attenuation:
-            warningmsg = ('"attenuation" must be integer, current input '
-                          'will be rounded to {}'.format(attenuation_raw))
-            self._logger.warning(warningmsg)
-
-        _ = self._put('-a{},-wA'.format(attenuation_raw))
-
-        self._logger.debug('Set attenuation: %s dB.', attenuation_raw)
-
-    def set_playback_offset(self, offset):
-        """Set the playback offset of GSS6450.
-
-        Args:
-            offset: RPS playback offset.
-                Type, datetime.timedelta, or numerical.
-
-        Raises:
-            GSS6450Error: raise when offset is not numeric or timedelta.
-        """
-        if isinstance(offset, datetime.timedelta):
-            offset_raw = offset.total_seconds()
-        elif isinstance(offset, numbers.Number):
-            offset_raw = offset
-        else:
-            raise GSS6450Error(error=('"offset" must be numerical value or '
-                                      'datetime.timedelta'),
-                               command='set_playback_offset')
-
-        _ = self._put('-o{}'.format(offset_raw))
-
-        self._logger.debug('Set playback offset: %s sec.', offset_raw)
-
-    def set_storage_media(self, media):
-        """Set the storage media of GSS6450.
-
-        Args:
-            media: RPS storage Media, Internal or External.
-                Type, str. Option, 'internal', 'removable'
-
-        Raises:
-            GSS6450Error: raised when the media option is not supported.
-        """
-        if media == 'internal':
-            raw_media = '1'
-        elif media == 'removable':
-            raw_media = '2'
-        else:
-            raise GSS6450Error(
-                error=('"media" input must be in ["internal", "removable"]. '
-                       ' Current input is {}'.format(media)),
-                command='set_storage_media')
-
-        _ = self._put('-M{}-wM'.format(raw_media))
-
-        resp_raw = self.get_storage_media()
-        if raw_media != resp_raw[0]:
-            raise GSS6450Error(
-                error=('Setting media "{}" is not the same as queried media '
-                       '"{}".'.format(media, resp_raw)),
-                command='set_storage_media')
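The GSS6450 class removed above wraps the RPS web interface (shm_put/shm_get). A minimal usage sketch; the IP address and scenario name are placeholders:

    # Hypothetical usage of the removed GSS6450 controller.
    rps = GSS6450('192.168.1.10')
    rps.set_rfport_voltage('3.3V')      # accepted: 'OFF', '3.3V', '5V'
    rps.set_attenuation(10)             # integer dB within [0, 31]
    rps.play_scenario('drive_test')     # or record_scenario() to capture RF
    print(rps.get_operation_mode())     # STOPPED / PLAYING / RECORDING
    print(rps.get_elapsed_time())       # datetime.timedelta
    rps.stop_scenario()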
diff --git a/src/antlion/controllers/spirent_lib/gss7000.py b/src/antlion/controllers/spirent_lib/gss7000.py
deleted file mode 100644
index 7cc47d4..0000000
--- a/src/antlion/controllers/spirent_lib/gss7000.py
+++ /dev/null
@@ -1,490 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python module for Spirent GSS7000 GNSS simulator.
-@author: Clay Liao (jianhsiungliao@)
-"""
-from time import sleep
-import xml.etree.ElementTree as ET
-from antlion.controllers import abstract_inst
-
-
-def get_xml_text(xml_string='', tag=''):
-    """Parse xml from string and return specific tag
-
-        Args:
-            xml_string: xml string,
-                Type, Str.
-            tag: tag in xml,
-                Type, Str.
-
-        Returns:
-            text: Text content in the tag
-                Type, Str.
-        """
-    if xml_string and tag:
-        root = ET.fromstring(xml_string)
-        try:
-            text = str(root.find(tag).text).strip()
-        except (AttributeError, ValueError):
-            text = 'INVALID DATA'
-    else:
-        text = 'INVALID DATA'
-    return text
-
-
-class GSS7000Error(abstract_inst.SocketInstrumentError):
-    """GSS7000 Instrument Error Class."""
-
-
-class AbstractInstGss7000(abstract_inst.SocketInstrument):
-    """Abstract instrument for  GSS7000"""
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd)
-        self._wait()
-        resp = self._recv()
-        return resp
-
-    def _wait(self, wait_time=1):
-        """wait function
-        Args:
-            wait_time: wait time in sec.
-                Type, int,
-                Default, 1.
-        """
-        sleep(wait_time)
-
-
-class GSS7000Ctrl(AbstractInstGss7000):
-    """GSS7000 control daemon class"""
-
-    def __init__(self, ip_addr, ip_port=7717):
-        """Init method for GSS7000 Control Daemon.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super().__init__(ip_addr, ip_port)
-        self.idn = 'Spirent-GSS7000 Control Daemon'
-
-    def connect(self):
-        """Init and Connect to GSS7000 Control Daemon."""
-        # Close any stale socket, then connect
-        self._close_socket()
-        self._connect_socket()
-        # Stop the GSS7000 control daemon, then start it
-        self._query('STOP_ENGINE')
-        self._wait()
-        self._query('START_ENGINE')
-
-    def close(self):
-        """Close GSS7000 control daemon"""
-        self._close_socket()
-        self._logger.debug('Closed connection to GSS7000 control daemon')
-
-
-class GSS7000(AbstractInstGss7000):
-    """GSS7000 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, engine_ip_port=15650, ctrl_ip_port=7717):
-        """Init method for GSS7000.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            engine_ip_port: TCPIP Port for the simulation engine.
-                Type, str.
-            ctrl_ip_port: TCPIP Port for the Control Daemon.
-        """
-        super().__init__(ip_addr, engine_ip_port)
-        self.idn = ''
-        self.connected = False
-        self.capability = []
-        self.gss7000_ctrl_daemon = GSS7000Ctrl(ip_addr, ctrl_ip_port)
-        # Close control daemon and engine sockets at the beginning
-        self.gss7000_ctrl_daemon._close_socket()
-        self._close_socket()
-
-    def connect(self):
-        """Connect GSS7000 engine daemon"""
-        # Connect control daemon socket
-        self._logger.debug('Connect to GSS7000')
-        self.gss7000_ctrl_daemon.connect()
-        # Connect to remote engine socket
-        self._wait()
-        self._connect_socket()
-        self.connected = True
-        self.get_hw_capability()
-
-    def close(self):
-        """Close GSS7000 engine daemon"""
-        # Close GSS7000 control daemon
-        self.gss7000_ctrl_daemon.close()
-        # Close GSS7000 engine daemon
-        self._close_socket()
-        self._logger.debug('Closed connection to GSS7000 engine daemon')
-
-    def _parse_hw_cap(self, xml):
-        """Parse GSS7000 hardware capability xml to list.
-            Args:
-                xml: hardware capability xml,
-                    Type, str.
-
-            Returns:
-                capability: Hardware capability list
-                    Type, list.
-        """
-        root = ET.fromstring(xml)
-        capability_ls = list()
-        sig_cap_list = root.find('data').find('Signal_capabilities').findall(
-            'Signal')
-        for signal in sig_cap_list:
-            value = str(signal.text).rstrip().lstrip()
-            capability_ls.extend(value.upper().split(' '))
-        return capability_ls
-
-    def get_hw_capability(self):
-        """Check GSS7000 hardware capability
-
-            Returns:
-                capability: Hardware capability list,
-                    Type, list.
-        """
-        if self.connected:
-            capability_xml = self._query('GET_LICENCED_HARDWARE_CAPABILITY')
-            self.capability = self._parse_hw_cap(capability_xml)
-
-        return self.capability
-
-    def get_idn(self):
-        """Get the SimREPLAYplus Version
-
-        Returns:
-            SimREPLAYplus Version
-        """
-        idn_xml = self._query('*IDN?')
-        self.idn = get_xml_text(idn_xml, 'data')
-        return self.idn
-
-    def load_scenario(self, scenario=''):
-        """Load the scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        if scenario == '':
-            errmsg = ('Missing scenario file')
-            raise GSS7000Error(error=errmsg, command='load_scenario')
-        else:
-            self._logger.debug('Stopped the original scenario')
-            self._query('-,EN,1')
-            cmd = 'SC,' + scenario
-            self._logger.debug('Loading scenario')
-            self._query(cmd)
-            self._logger.debug('Scenario is loaded')
-            return True
-
-    def start_scenario(self, scenario=''):
-        """Load and Start the running scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        if scenario:
-            if self.load_scenario(scenario):
-                self._query('RU')
-            else:
-                infmsg = 'No scenario is loaded. Stop running scenario'
-                self._logger.debug(infmsg)
-        else:
-            pass
-
-        if scenario:
-            infmsg = 'Started running scenario {}'.format(scenario)
-        else:
-            infmsg = 'Started running current scenario'
-
-        self._logger.debug(infmsg)
-
-    def get_scenario_name(self):
-        """Get current scenario name"""
-        sc_name_xml = self._query('SC_NAME')
-        return get_xml_text(sc_name_xml, 'data')
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-        self._query('-,EN,1')
-        self._logger.debug('Stopped running scenario')
-
-    def set_power_offset(self, ant=1, power_offset=0):
-        """Set Power Offset of GSS7000 Tx
-        Args:
-            ant: antenna number of GSS7000
-            power_offset: transmit power offset level
-                Type, float.
-                Decimal, unit [dB]
-
-        Raises:
-            GSS7000Error: raise when power offset level is not in [-49, 15] range.
-        """
-        if not -49 <= power_offset <= 15:
-            errmsg = (f'"power_offset" must be within [-49, 15], '
-                      f'current input is {power_offset}')
-            raise GSS7000Error(error=errmsg, command='set_power_offset')
-
-        cmd = f'-,POW_LEV,V1_A{ant},{power_offset},GPS,0,0,1,1,1,1,0'
-        self._query(cmd)
-
-        infmsg = f'Set vehicle 1 antenna {ant} power offset: {power_offset}'
-        self._logger.debug(infmsg)
-
-    def set_ref_power(self, ref_dBm=-130):
-        """Set Ref Power of GSS7000 Tx
-        Args:
-            ref_dBm: transmit reference power level in dBm for GSS7000
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSS7000Error: raised when the reference power level is not in [-170, -115] range.
-        """
-        if not -170 <= ref_dBm <= -115:
-            errmsg = ('"power_offset" must be within [-170, -115], '
-                      'current input is {}').format(str(ref_dBm))
-            raise GSS7000Error(error=errmsg, command='set_ref_power')
-        cmd = 'REF_DBM,{}'.format(str(round(ref_dBm, 1)))
-        self._query(cmd)
-        infmsg = 'Set reference power level: {}'.format(str(round(ref_dBm, 1)))
-        self._logger.debug(infmsg)
-
-    def get_status(self, return_txt=False):
-        """Get current GSS7000 Status
-        Args:
-            return_txt: whether to return the status as descriptive text
-                Type, boolean.
-        """
-        status_xml = self._query('NULL')
-        status = get_xml_text(status_xml, 'status')
-        if return_txt:
-            status_dict = {
-                '0': 'No Scenario loaded',
-                '1': 'Not completed loading a scenario',
-                '2': 'Idle, ready to run a scenario',
-                '3': 'Arming the scenario',
-                '4': 'Completed arming; or waiting for a command or '
-                     'trigger signal to start the scenario',
-                '5': 'Scenario running',
-                '6': 'Current scenario is paused.',
-                '7': 'Active scenario has stopped and has not been reset. '
-                     'Waiting for further commands.'
-            }
-            return status_dict.get(status)
-        else:
-            return int(status)
-
-    def set_power(self, power_level=-130):
-        """Set Power Level of GSS7000 Tx
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSS7000Error: raise when power level is not in [-170, -115] range.
-        """
-        if not -170 <= power_level <= -115:
-            errmsg = (f'"power_level" must be within [-170, -115], '
-                      f'current input is {power_level}')
-            raise GSS7000Error(error=errmsg, command='set_power')
-
-        power_offset = power_level + 130
-        self.set_power_offset(1, power_offset)
-        self.set_power_offset(2, power_offset)
-
-        infmsg = 'Set GSS7000 transmit power to "{}"'.format(
-            round(power_level, 1))
-        self._logger.debug(infmsg)
-
-    def power_lev_offset_cal(self, power_level=-130, sat='GPS', band='L1'):
-        """Convert target power level to power offset for GSS7000 power setting
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-                Default, -130.
-            sat: satellite system used for the default-power lookup
-                Type, str
-                Option 'GPS/GLO/GAL'
-            band: Frequency band used for the default-power lookup
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/E5'
-                Default, 'L1'.
-        Return:
-            power_offset: The calculated power offset for setting GSS7000 GNSS target power.
-        """
-        gss7000_tx_pwr = {
-            'GPS_L1': -130,
-            'GPS_L5': -127.9,
-            'GLONASS_F1': -131,
-            'GALILEO_L1': -127,
-            'GALILEO_E5': -122,
-            'BEIDOU_B1I': -133,
-            'BEIDOU_B1C': -130,
-            'BEIDOU_B2A': -127,
-            'QZSS_L1': -128.5,
-            'QZSS_L5': -124.9,
-            'IRNSS_L5': -130
-        }
-
-        sat_band = f'{sat}_{band}'
-        infmsg = f'Target satellite system and band: {sat_band}'
-        self._logger.debug(infmsg)
-        default_pwr_lev = gss7000_tx_pwr.get(sat_band, -130)
-        power_offset = power_level - default_pwr_lev
-        infmsg = (
-            f'Target power: {power_level}; Default power: {default_pwr_lev};'
-            f' Power offset: {power_offset}')
-        self._logger.debug(infmsg)
-
-        return power_offset
-
-    def sat_band_convert(self, sat, band):
-        """Satellite system and operation band conversion and check.
-        Args:
-            sat: satellite system to convert and check
-                Type, str
-                Option 'GPS/GLO/GAL/BDS'
-            band: Frequency band to convert and check
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/F1/E5'
-                Default, '', assumed to be L1.
-        """
-        sat_system_dict = {
-            'GPS': 'GPS',
-            'GLO': 'GLONASS',
-            'GAL': 'GALILEO',
-            'BDS': 'BEIDOU',
-            'IRNSS': 'IRNSS',
-            'ALL': 'GPS'
-        }
-        sat = sat_system_dict.get(sat, 'GPS')
-        if band == '':
-            infmsg = 'No band is set. Set to default band = L1'
-            self._logger.debug(infmsg)
-            band = 'L1'
-        if sat == '':
-            infmsg = 'No satellite system is set. Set to default sat = GPS'
-            self._logger.debug(infmsg)
-            sat = 'GPS'
-        sat_band = f'{sat}_{band}'
-        self._logger.debug(f'Current band: {sat_band}')
-        self._logger.debug(f'Capability: {self.capability}')
-        # Check if satellite standard and band are supported
-        # If not in support list, return GPS_L1 as default
-        if sat_band not in self.capability:
-            errmsg = (
-                f'Satellite system and band ({sat_band}) are not supported. '
-                f'The GSS7000 supported list: {self.capability}')
-            raise GSS7000Error(error=errmsg, command='set_scenario_power')
-        else:
-            sat_band_tp = tuple(sat_band.split('_'))
-
-        return sat_band_tp
-
-    def set_scenario_power(self,
-                           power_level=-130,
-                           sat_id='',
-                           sat_system='',
-                           freq_band='L1'):
-        """Set dynamic power for the running scenario.
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-                Default. -130
-            sat_id: set power level for specific satellite identifiers
-                Type, int.
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL/BDS'
-                Type, str
-                Default, '', assumed to be GPS.
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/F1/E5/ALL'
-                Default, '', assumed to be L1.
-        Raises:
-            GSS7000Error: raised when the calculated power offset is not in [-49, 15] range.
-        """
-        band_dict = {
-            'L1': 1,
-            'L5': 2,
-            'B2A': 2,
-            'B1I': 1,
-            'B1C': 1,
-            'F1': 1,
-            'E5': 2,
-            'ALL': 3
-        }
-
-        # Convert and check satellite system and band
-        sat, band = self.sat_band_convert(sat_system, freq_band)
-        # Get freq band setting
-        band_cmd = band_dict.get(band, 1)
-
-        if not sat_id:
-            sat_id = 0
-            all_tx_type = 1
-        else:
-            all_tx_type = 0
-
-        # Convert absolute power level to absolute power offset.
-        power_offset = self.power_lev_offset_cal(power_level, sat, band)
-
-        if not -49 <= power_offset <= 15:
-            errmsg = (f'"power_offset" must be within [-49, 15], '
-                      f'current input is {power_offset}')
-            raise GSS7000Error(error=errmsg, command='set_power_offset')
-
-        if band_cmd == 1:
-            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-        elif band_cmd == 2:
-            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-        elif band_cmd == 3:
-            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
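The GSS7000 class removed above pairs an engine socket with a control-daemon socket and converts absolute power levels into per-antenna offsets. A minimal sketch of that flow; the simulator IP and scenario path are placeholders:

    # Hypothetical usage of the removed GSS7000 controller.
    sim = GSS7000('192.168.1.20')                  # engine 15650, control 7717 by default
    sim.connect()                                  # control daemon first, then engine
    sim.start_scenario('/scenarios/open_sky.scn')
    print(sim.get_status(return_txt=True))         # e.g. 'Scenario running'
    sim.set_power(-130)                            # absolute level, [-170, -115] dBm
    sim.set_scenario_power(-135, sat_system='GPS', freq_band='L1')
    sim.stop_scenario()
    sim.close()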
diff --git a/src/antlion/controllers/tigertail.py b/src/antlion/controllers/tigertail.py
deleted file mode 100644
index e99c6e5..0000000
--- a/src/antlion/controllers/tigertail.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""Module manager the required definitions for tigertail"""
-
-import logging
-import time
-
-from enum import Enum
-
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = "Tigertail"
-ACTS_CONTROLLER_REFERENCE_NAME = "tigertails"
-
-TIGERTAIL_SLEEP_TIME = 5
-
-
-def create(configs):
-    """Takes a list of Tigertail serial numbers and returns Tigertail Controllers.
-
-    Args:
-        configs: A list of serial numbers
-
-    Returns:
-        a list of Tigertail controllers
-
-    Raises:
-        ValueError if the configuration is not a list of serial numbers
-    """
-    tigertails = []
-    if isinstance(configs, list):
-        for serial_no in configs:
-            tigertail = Tigertail(serial_no)
-            tigertails.append(tigertail)
-    else:
-        raise ValueError(
-            'Invalid config for tigertail, should be a list of serial numbers')
-
-    return tigertails
-
-
-def destroy(tigertails):
-    pass
-
-
-def get_info(tigertails):
-    return [tigertail.get_info() for tigertail in tigertails]
-
-
-class TigertailError(Exception):
-    pass
-
-
-class TigertailState(Enum):
-    def __str__(self):
-        return str(self.value)
-
-    A = 'A'
-    B = 'B'
-    Off = 'off'
-
-
-class Tigertail(object):
-    def __init__(self, serial_number):
-        self.serial_number = serial_number
-        self.tigertool_bin = None
-
-    def setup(self, user_params):
-        """Links tigertool binary
-
-        This function needs to be:
-        Args:
-            user_params: User defined parameters. Expected field is:
-            {
-                // required, string or list of strings
-                tigertool: ['/path/to/tigertool.par']
-            }
-        """
-        tigertool_path = user_params['tigertool']
-        if tigertool_path is None:
-            self.tigertool_bin = None
-        elif isinstance(tigertool_path, str):
-            self.tigertool_bin = tigertool_path
-        elif isinstance(tigertool_path, list):
-            if len(tigertool_path) == 0:
-                self.tigertool_bin = None
-            else:
-                self.tigertool_bin = tigertool_path[0]
-
-        if self.tigertool_bin is None:
-            raise TigertailError('Tigertail binary not found')
-
-        logging.getLogger().debug(
-            f'Setup {self.serial_number} with binary at {self.tigertool_bin}')
-
-    def turn_on_mux_A(self):
-        self._set_tigertail_state(TigertailState.A)
-
-    def turn_on_mux_B(self):
-        self._set_tigertail_state(TigertailState.B)
-
-    def turn_off(self):
-        self._set_tigertail_state(TigertailState.Off)
-
-    def get_info(self):
-        return {'tigertail_serial_no': self.serial_number}
-
-    def _set_tigertail_state(self, state: TigertailState):
-        """Sets state for tigertail, there are 3 possible states:
-            A  : enable port A
-            B  : enable port B
-            Off: turn off both ports
-        """
-        result = job.run([
-            self.tigertool_bin, '--serialno',
-            str(self.serial_number), '--mux',
-            str(state)
-        ],
-                         timeout=10)
-
-        if result.stderr != '':
-            raise TigertailError(result.stderr)
-
-        # Sleep to let the device connect to or disconnect from the Tigertail
-        time.sleep(TIGERTAIL_SLEEP_TIME)
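The removed Tigertail controller shells out to the tigertool binary to switch a USB mux between two ports. A sketch of how a test would drive it; the serial number and binary path are placeholders:

    # Hypothetical usage of the removed Tigertail controller.
    tigertails = create(['0123456789AB'])
    for tt in tigertails:
        tt.setup({'tigertool': ['/path/to/tigertool.par']})
        tt.turn_on_mux_A()   # route USB to port A
        tt.turn_off()        # disconnect both ports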
diff --git a/src/antlion/controllers/utils_lib/commands/ip.py b/src/antlion/controllers/utils_lib/commands/ip.py
index 0b14d2e..7e028b1 100644
--- a/src/antlion/controllers/utils_lib/commands/ip.py
+++ b/src/antlion/controllers/utils_lib/commands/ip.py
@@ -44,7 +44,7 @@
                  where address is a ipaddress.IPv4Interface and broadcast
                  is an ipaddress.IPv4Address.
         """
-        results = self._runner.run('ip addr show dev %s' % net_interface)
+        results = self._runner.run("ip addr show dev %s" % net_interface)
         lines = results.stdout.splitlines()
 
         # Example stdout:
@@ -57,18 +57,17 @@
 
         for line in lines:
             line = line.strip()
-            match = re.search('inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)',
-                              line)
+            match = re.search("inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)", line)
             if match:
                 d = match.groupdict()
-                address = ipaddress.IPv4Interface(d['address'])
-                bcast = ipaddress.IPv4Address(d['bcast'])
+                address = ipaddress.IPv4Interface(d["address"])
+                bcast = ipaddress.IPv4Address(d["bcast"])
                 yield (address, bcast)
 
-            match = re.search('inet (?P<address>[^\s]*)', line)
+            match = re.search("inet (?P<address>[^\s]*)", line)
             if match:
                 d = match.groupdict()
-                address = ipaddress.IPv4Interface(d['address'])
+                address = ipaddress.IPv4Interface(d["address"])
                 yield (address, None)
 
     def add_ipv4_address(self, net_interface, address, broadcast=None):
@@ -83,11 +82,12 @@
                        this net_interfaces subnet.
         """
         if broadcast:
-            self._runner.run('ip addr add %s broadcast %s dev %s' %
-                             (address, broadcast, net_interface))
+            self._runner.run(
+                "ip addr add %s broadcast %s dev %s"
+                % (address, broadcast, net_interface)
+            )
         else:
-            self._runner.run('ip addr add %s dev %s' %
-                             (address, net_interface))
+            self._runner.run("ip addr add %s dev %s" % (address, net_interface))
 
     def remove_ipv4_address(self, net_interface, address, ignore_status=False):
         """Remove an ipv4 address.
@@ -104,8 +104,9 @@
             The job result from the command.
         """
         return self._runner.run(
-            'ip addr del %s dev %s' % (address, net_interface),
-            ignore_status=ignore_status)
+            "ip addr del %s dev %s" % (address, net_interface),
+            ignore_status=ignore_status,
+        )
 
     def set_ipv4_address(self, net_interface, address, broadcast=None):
         """Set the ipv4 address.
@@ -134,23 +135,26 @@
         ip_info = self.get_ipv4_addresses(net_interface)
 
         for address, _ in ip_info:
-            result = self.remove_ipv4_address(net_interface, address,
-                                              ignore_status=True)
+            result = self.remove_ipv4_address(
+                net_interface, address, ignore_status=True
+            )
             # It is possible that the address has already been removed by the
             # time this command has been called. In such a case, we would get
             # this error message.
-            error_msg = 'RTNETLINK answers: Cannot assign requested address'
+            error_msg = "RTNETLINK answers: Cannot assign requested address"
             if result.exit_status != 0:
                 if error_msg in result.stderr:
                     # If it was removed by another process, log a warning
                     if address not in self.get_ipv4_addresses(net_interface):
                         self._runner.log.warning(
-                            'Unable to remove address %s. The address was '
-                            'removed by another process.' % address)
+                            "Unable to remove address %s. The address was "
+                            "removed by another process." % address
+                        )
                         continue
                     # If it was not removed, raise an error
                     self._runner.log.error(
-                        'Unable to remove address %s. The address is still '
-                        'registered to %s, despite call for removal.' %
-                        (address, net_interface))
+                        "Unable to remove address %s. The address is still "
+                        "registered to %s, despite call for removal."
+                        % (address, net_interface)
+                    )
                 raise job.Error(result)
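The reformatted ip-command wrapper above parses `ip addr show` output into ipaddress objects. A usage sketch; `ip_cmd` is assumed to be an instance of the wrapper class defined in this file, constructed with a shell runner for the target host, and the interface name and addresses are placeholders:

    # Hypothetical usage of the ip-command wrapper in this file.
    for address, broadcast in ip_cmd.get_ipv4_addresses('eth0'):
        print(address, broadcast)                 # IPv4Interface, IPv4Address or None
    ip_cmd.set_ipv4_address('eth0', '192.168.42.1/24')
    ip_cmd.remove_ipv4_address('eth0', '192.168.42.1/24', ignore_status=True)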
diff --git a/src/antlion/controllers/utils_lib/commands/route.py b/src/antlion/controllers/utils_lib/commands/route.py
index 3897f39..a886455 100644
--- a/src/antlion/controllers/utils_lib/commands/route.py
+++ b/src/antlion/controllers/utils_lib/commands/route.py
@@ -29,7 +29,7 @@
 class LinuxRouteCommand(object):
     """Interface for doing standard ip route commands on a linux system."""
 
-    DEFAULT_ROUTE = 'default'
+    DEFAULT_ROUTE = "default"
 
     def __init__(self, runner):
         """
@@ -39,7 +39,7 @@
         """
         self._runner = runner
 
-    def add_route(self, net_interface, address, proto='static'):
+    def add_route(self, net_interface, address, proto="static"):
         """Add an entry to the ip routing table.
 
         Will add a route for either a specific ip address, or a network.
@@ -61,13 +61,14 @@
             NetworkInterfaceDown: Raised when the network interface is down.
         """
         try:
-            self._runner.run(f'ip route add {address} dev {net_interface} proto {proto}')
+            self._runner.run(
+                f"ip route add {address} dev {net_interface} proto {proto}"
+            )
         except connection.CommandError as e:
-            if 'File exists' in e.result.stderr:
-                raise Error('Route already exists.')
-            if 'Network is down' in e.result.stderr:
-                raise NetworkInterfaceDown(
-                    'Device must be up for adding a route.')
+            if "File exists" in e.result.stderr:
+                raise Error("Route already exists.")
+            if "Network is down" in e.result.stderr:
+                raise NetworkInterfaceDown("Device must be up for adding a route.")
             raise
 
     def get_routes(self, net_interface=None):
@@ -84,8 +85,8 @@
                  it will be a ipaddress.IPv4Network otherwise it is a
                  ipaddress.IPv4Address.
         """
-        result_ipv4 = self._runner.run('ip -4 route show')
-        result_ipv6 = self._runner.run('ip -6 route show')
+        result_ipv4 = self._runner.run("ip -4 route show")
+        result_ipv6 = self._runner.run("ip -6 route show")
 
         lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()
 
@@ -96,22 +97,22 @@
         # 192.168.2.1 dev eth2 proto kernel scope link metric 1
         # fe80::/64 dev wlan0 proto static metric 1024
         for line in lines:
-            if not 'dev' in line:
+            if not "dev" in line:
                 continue
 
             if line.startswith(self.DEFAULT_ROUTE):
                 # The default route entry is formatted differently.
-                match = re.search('dev (?P<net_interface>\S+)', line)
+                match = re.search("dev (?P<net_interface>\S+)", line)
                 pair = None
                 if match:
                     # When there is a match for the route entry pattern create
                     # A pair to hold the info.
-                    pair = (self.DEFAULT_ROUTE,
-                            match.groupdict()['net_interface'])
+                    pair = (self.DEFAULT_ROUTE, match.groupdict()["net_interface"])
             else:
                 # Test the normal route entry pattern.
                 match = re.search(
-                    '(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)', line)
+                    "(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)", line
+                )
                 pair = None
                 if match:
                     # When there is a match for the route entry pattern create
@@ -119,11 +120,11 @@
                     d = match.groupdict()
                     # Route can be either a network or specific address
                     try:
-                        address = ipaddress.ip_address(d['address'])
+                        address = ipaddress.ip_address(d["address"])
                     except ValueError:
-                        address = d['address']
+                        address = d["address"]
 
-                    pair = (address, d['net_interface'])
+                    pair = (address, d["net_interface"])
 
             # No pair means no pattern was found.
             if not pair:
@@ -171,12 +172,11 @@
         """
         try:
             if net_interface:
-                self._runner.run('ip route del %s dev %s' %
-                                 (address, net_interface))
+                self._runner.run("ip route del %s dev %s" % (address, net_interface))
             else:
-                self._runner.run('ip route del %s' % address)
+                self._runner.run("ip route del %s" % address)
         except connection.CommandError as e:
-            if 'No such process' in e.result.stderr:
+            if "No such process" in e.result.stderr:
                 # The route didn't exist.
                 return
             raise
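LinuxRouteCommand shown above wraps `ip route add/show/del`. A usage sketch; `runner` is assumed to be the same kind of shell runner the other utils_lib commands take, and the interface and subnet are placeholders:

    # Hypothetical usage of LinuxRouteCommand.
    route_cmd = LinuxRouteCommand(runner)
    route_cmd.add_route('eth0', '192.168.5.0/24')      # raises Error if the route already exists
    for address, net_interface in route_cmd.get_routes():
        print(address, net_interface)                  # ('default', 'eth0') for the default route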
diff --git a/src/antlion/controllers/utils_lib/commands/shell.py b/src/antlion/controllers/utils_lib/commands/shell.py
index 34abeeb..ed105af 100644
--- a/src/antlion/controllers/utils_lib/commands/shell.py
+++ b/src/antlion/controllers/utils_lib/commands/shell.py
@@ -57,7 +57,7 @@
             job.Error: When the command executed but had an error.
         """
         if self._working_dir:
-            command_str = 'cd %s; %s' % (self._working_dir, command)
+            command_str = "cd %s; %s" % (self._working_dir, command)
         else:
             command_str = command
 
@@ -80,11 +80,11 @@
         """
         try:
             if isinstance(identifier, str):
-                self.run('ps aux | grep -v grep | grep %s' % identifier)
+                self.run("ps aux | grep -v grep | grep %s" % identifier)
             elif isinstance(identifier, int):
                 self.signal(identifier, 0)
             else:
-                raise ValueError('Bad type was given for identifier')
+                raise ValueError("Bad type was given for identifier")
 
             return True
         except job.Error:
@@ -103,7 +103,7 @@
                   if no pids were found.
         """
         try:
-            result = self.run('ps aux | grep -v grep | grep %s' % identifier)
+            result = self.run("ps aux | grep -v grep | grep %s" % identifier)
         except job.Error as e:
             if e.result.exit_status == 1:
                 # Grep returns exit status 1 when no lines are selected. This is
@@ -136,7 +136,7 @@
             True if the string or pattern was found, False otherwise.
         """
         try:
-            self.run('grep %s %s' % (shlex.quote(search_string), file_name))
+            self.run("grep %s %s" % (shlex.quote(search_string), file_name))
             return True
         except job.Error:
             return False
@@ -150,7 +150,7 @@
         Returns:
             A string of the files contents.
         """
-        return self.run('cat %s' % file_name).stdout
+        return self.run("cat %s" % file_name).stdout
 
     def write_file(self, file_name, data):
         """Writes a block of data to a file through the shell.
@@ -159,7 +159,7 @@
             file_name: The name of the file to write to.
             data: The string of data to write.
         """
-        return self.run('echo %s > %s' % (shlex.quote(data), file_name))
+        return self.run("echo %s > %s" % (shlex.quote(data), file_name))
 
     def append_file(self, file_name, data):
         """Appends a block of data to a file through the shell.
@@ -168,7 +168,7 @@
             file_name: The name of the file to write to.
             data: The string of data to write.
         """
-        return self.run('echo %s >> %s' % (shlex.quote(data), file_name))
+        return self.run("echo %s >> %s" % (shlex.quote(data), file_name))
 
     def touch_file(self, file_name):
         """Creates a file through the shell.
@@ -176,7 +176,7 @@
         Args:
             file_name: The name of the file to create.
         """
-        self.write_file(file_name, '')
+        self.write_file(file_name, "")
 
     def delete_file(self, file_name):
         """Deletes a file through the shell.
@@ -185,9 +185,9 @@
             file_name: The name of the file to delete.
         """
         try:
-            self.run('rm -r %s' % file_name)
+            self.run("rm -r %s" % file_name)
         except job.Error as e:
-            if 'No such file or directory' in e.result.stderr:
+            if "No such file or directory" in e.result.stderr:
                 return
 
             raise
@@ -241,4 +241,4 @@
             job.Error: Raised when the signal fail to reach
                        the specified program.
         """
-        self.run('kill -%d %d' % (sig, pid))
+        self.run("kill -%d %d" % (sig, pid))
diff --git a/src/antlion/controllers/utils_lib/host_utils.py b/src/antlion/controllers/utils_lib/host_utils.py
index db7be27..1b66089 100644
--- a/src/antlion/controllers/utils_lib/host_utils.py
+++ b/src/antlion/controllers/utils_lib/host_utils.py
@@ -43,15 +43,16 @@
     Returns:
         True if the port is available; False otherwise.
     """
-    return (_try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and
-            _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP))
+    return _try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and _try_bind(
+        port, socket.SOCK_DGRAM, socket.IPPROTO_UDP
+    )
 
 
 def _try_bind(port, socket_type, socket_proto):
     s = socket.socket(socket.AF_INET, socket_type, socket_proto)
     try:
         try:
-            s.bind(('', port))
+            s.bind(("", port))
             # The result of getsockname() is protocol dependent, but for both
             # IPv4 and IPv6 the second field is a port number.
             return s.getsockname()[1]
diff --git a/src/antlion/controllers/utils_lib/ssh/connection.py b/src/antlion/controllers/utils_lib/ssh/connection.py
index 799905e..23c80d7 100644
--- a/src/antlion/controllers/utils_lib/ssh/connection.py
+++ b/src/antlion/controllers/utils_lib/ssh/connection.py
@@ -37,6 +37,7 @@
     Attributes:
         result: The results of the ssh command that had the error.
     """
+
     def __init__(self, result):
         """
         Args:
@@ -45,12 +46,14 @@
         self.result = result
 
     def __str__(self):
-        return 'cmd: %s\nstdout: %s\nstderr: %s' % (
-            self.result.command, self.result.stdout, self.result.stderr)
+        return "cmd: %s\nstdout: %s\nstderr: %s" % (
+            self.result.command,
+            self.result.stdout,
+            self.result.stderr,
+        )
 
 
-_Tunnel = collections.namedtuple('_Tunnel',
-                                 ['local_port', 'remote_port', 'proc'])
+_Tunnel = collections.namedtuple("_Tunnel", ["local_port", "remote_port", "proc"])
 
 
 class SshConnection(object):
@@ -61,10 +64,11 @@
     a command is run. If the persistent connection fails it will attempt
     to connect normally.
     """
+
     @property
     def socket_path(self):
         """Returns: The os path to the master socket file."""
-        return os.path.join(self._master_ssh_tempdir, 'socket')
+        return os.path.join(self._master_ssh_tempdir, "socket")
 
     def __init__(self, settings):
         """
@@ -81,7 +85,7 @@
         self._tunnels = list()
 
         def log_line(msg):
-            return '[SshConnection | %s] %s' % (self._settings.hostname, msg)
+            return "[SshConnection | %s] %s" % (self._settings.hostname, msg)
 
         self.log = logger.create_logger(log_line)
 
@@ -110,34 +114,35 @@
         with self._lock:
             if self._master_ssh_proc is not None:
                 socket_path = self.socket_path
-                if (not os.path.exists(socket_path)
-                        or self._master_ssh_proc.poll() is not None):
-                    self.log.debug('Master ssh connection to %s is down.',
-                                   self._settings.hostname)
+                if (
+                    not os.path.exists(socket_path)
+                    or self._master_ssh_proc.poll() is not None
+                ):
+                    self.log.debug(
+                        "Master ssh connection to %s is down.", self._settings.hostname
+                    )
                     self._cleanup_master_ssh()
 
             if self._master_ssh_proc is None:
                 # Create a shared socket in a temp location.
-                self._master_ssh_tempdir = tempfile.mkdtemp(
-                    prefix='ssh-master')
+                self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master")
 
                 # Setup flags and options for running the master ssh
                 # -N: Do not execute a remote command.
                 # ControlMaster: Spawn a master connection.
                 # ControlPath: The master connection socket path.
-                extra_flags = {'-N': None}
+                extra_flags = {"-N": None}
                 extra_options = {
-                    'ControlMaster': True,
-                    'ControlPath': self.socket_path,
-                    'BatchMode': True
+                    "ControlMaster": True,
+                    "ControlPath": self.socket_path,
+                    "BatchMode": True,
                 }
 
                 # Construct the command and start it.
                 master_cmd = self._formatter.format_ssh_local_command(
-                    self._settings,
-                    extra_flags=extra_flags,
-                    extra_options=extra_options)
-                self.log.info('Starting master ssh connection.')
+                    self._settings, extra_flags=extra_flags, extra_options=extra_options
+                )
+                self.log.info("Starting master ssh connection.")
                 self._master_ssh_proc = job.run_async(master_cmd)
 
                 end_time = time.time() + timeout_seconds
@@ -145,18 +150,20 @@
                 while time.time() < end_time:
                     if os.path.exists(self.socket_path):
                         break
-                    time.sleep(.2)
+                    time.sleep(0.2)
                 else:
                     self._cleanup_master_ssh()
-                    raise Error('Master ssh connection timed out.')
+                    raise Error("Master ssh connection timed out.")
 
-    def run(self,
-            command,
-            timeout=60,
-            ignore_status=False,
-            env=None,
-            io_encoding='utf-8',
-            attempts=2):
+    def run(
+        self,
+        command,
+        timeout=60,
+        ignore_status=False,
+        env=None,
+        io_encoding="utf-8",
+        attempts=2,
+    ):
         """Runs a remote command over ssh.
 
         Will ssh to a remote host and run a command. This method will
@@ -189,93 +196,98 @@
         try:
             self.setup_master_ssh(self._settings.connect_timeout)
         except Error:
-            self.log.warning('Failed to create master ssh connection, using '
-                             'normal ssh connection.')
+            self.log.warning(
+                "Failed to create master ssh connection, using "
+                "normal ssh connection."
+            )
 
-        extra_options = {'BatchMode': True}
+        extra_options = {"BatchMode": True}
         if self._master_ssh_proc:
-            extra_options['ControlPath'] = self.socket_path
+            extra_options["ControlPath"] = self.socket_path
 
         identifier = str(uuid.uuid4())
         full_command = 'echo "CONNECTED: %s"; %s' % (identifier, command)
 
         terminal_command = self._formatter.format_command(
-            full_command, env, self._settings, extra_options=extra_options)
+            full_command, env, self._settings, extra_options=extra_options
+        )
 
         dns_retry_count = 2
         while True:
-            result = job.run(terminal_command,
-                             ignore_status=True,
-                             timeout=timeout,
-                             io_encoding=io_encoding)
+            result = job.run(
+                terminal_command,
+                ignore_status=True,
+                timeout=timeout,
+                io_encoding=io_encoding,
+            )
             output = result.stdout
 
             # Check for a connected message to prevent false negatives.
-            valid_connection = re.search('^CONNECTED: %s' % identifier,
-                                         output,
-                                         flags=re.MULTILINE)
+            valid_connection = re.search(
+                "^CONNECTED: %s" % identifier, output, flags=re.MULTILINE
+            )
             if valid_connection:
                 # Remove the first line that contains the connect message.
-                line_index = output.find('\n') + 1
+                line_index = output.find("\n") + 1
                 if line_index == 0:
                     line_index = len(output)
                 real_output = output[line_index:].encode(io_encoding)
 
-                result = job.Result(command=result.command,
-                                    stdout=real_output,
-                                    stderr=result._raw_stderr,
-                                    exit_status=result.exit_status,
-                                    duration=result.duration,
-                                    did_timeout=result.did_timeout,
-                                    encoding=io_encoding)
+                result = job.Result(
+                    command=result.command,
+                    stdout=real_output,
+                    stderr=result._raw_stderr,
+                    exit_status=result.exit_status,
+                    duration=result.duration,
+                    did_timeout=result.did_timeout,
+                    encoding=io_encoding,
+                )
                 if result.exit_status and not ignore_status:
                     raise job.Error(result)
                 return result
 
             error_string = result.stderr
 
-            had_dns_failure = (result.exit_status == 255 and re.search(
-                r'^ssh: .*: Name or service not known',
-                error_string,
-                flags=re.MULTILINE))
+            had_dns_failure = result.exit_status == 255 and re.search(
+                r"^ssh: .*: Name or service not known", error_string, flags=re.MULTILINE
+            )
             if had_dns_failure:
                 dns_retry_count -= 1
                 if not dns_retry_count:
-                    raise Error('DNS failed to find host.', result)
-                self.log.debug('Failed to connect to host, retrying...')
+                    raise Error("DNS failed to find host.", result)
+                self.log.debug("Failed to connect to host, retrying...")
             else:
                 break
 
         had_timeout = re.search(
-            r'^ssh: connect to host .* port .*: '
-            r'Connection timed out\r$',
+            r"^ssh: connect to host .* port .*: " r"Connection timed out\r$",
             error_string,
-            flags=re.MULTILINE)
+            flags=re.MULTILINE,
+        )
         if had_timeout:
-            raise Error('Ssh timed out.', result)
+            raise Error("Ssh timed out.", result)
 
-        permission_denied = 'Permission denied' in error_string
+        permission_denied = "Permission denied" in error_string
         if permission_denied:
-            raise Error('Permission denied.', result)
+            raise Error("Permission denied.", result)
 
         unknown_host = re.search(
-            r'ssh: Could not resolve hostname .*: '
-            r'Name or service not known',
+            r"ssh: Could not resolve hostname .*: " r"Name or service not known",
             error_string,
-            flags=re.MULTILINE)
+            flags=re.MULTILINE,
+        )
         if unknown_host:
-            raise Error('Unknown host.', result)
+            raise Error("Unknown host.", result)
 
-        self.log.error('An unknown error has occurred. Job result: %s' %
-                       result)
-        ping_output = job.run('ping %s -c 3 -w 1' % self._settings.hostname,
-                              ignore_status=True)
-        self.log.error('Ping result: %s' % ping_output)
+        self.log.error("An unknown error has occurred. Job result: %s" % result)
+        ping_output = job.run(
+            "ping %s -c 3 -w 1" % self._settings.hostname, ignore_status=True
+        )
+        self.log.error("Ping result: %s" % ping_output)
         if attempts > 1:
             self._cleanup_master_ssh()
-            self.run(command, timeout, ignore_status, env, io_encoding,
-                     attempts - 1)
-        raise Error('The job failed for unknown reasons.', result)
+            return self.run(command, timeout, ignore_status, env, io_encoding, attempts - 1)
+        raise Error("The job failed for unknown reasons.", result)
 
     def run_async(self, command, env=None):
         """Starts up a background command over ssh.
@@ -298,7 +310,9 @@
             SshPermissionDeniedError: When permission is not allowed on the
                                       remote host.
         """
-        return self.run(f'({command}) < /dev/null > /dev/null 2>&1 & echo -n $!', env=env)
+        return self.run(
+            f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!", env=env
+        )
 
     def close(self):
         """Clean up open connections to remote host."""
@@ -313,14 +327,14 @@
         """
         # If a master SSH connection is running, kill it.
         if self._master_ssh_proc is not None:
-            self.log.debug('Nuking master_ssh_job.')
+            self.log.debug("Nuking master_ssh_job.")
             self._master_ssh_proc.kill()
             self._master_ssh_proc.wait()
             self._master_ssh_proc = None
 
         # Remove the temporary directory for the master SSH socket.
         if self._master_ssh_tempdir is not None:
-            self.log.debug('Cleaning master_ssh_tempdir.')
+            self.log.debug("Cleaning master_ssh_tempdir.")
             shutil.rmtree(self._master_ssh_tempdir)
             self._master_ssh_tempdir = None
 
@@ -346,24 +360,27 @@
                     return tunnel.local_port
 
         extra_flags = {
-            '-n': None,  # Read from /dev/null for stdin
-            '-N': None,  # Do not execute a remote command
-            '-q': None,  # Suppress warnings and diagnostic commands
-            '-L': '%d:localhost:%d' % (local_port, port),
+            "-n": None,  # Read from /dev/null for stdin
+            "-N": None,  # Do not execute a remote command
+            "-q": None,  # Suppress warnings and diagnostic commands
+            "-L": "%d:localhost:%d" % (local_port, port),
         }
         extra_options = dict()
         if self._master_ssh_proc:
-            extra_options['ControlPath'] = self.socket_path
+            extra_options["ControlPath"] = self.socket_path
         tunnel_cmd = self._formatter.format_ssh_local_command(
-            self._settings,
-            extra_flags=extra_flags,
-            extra_options=extra_options)
-        self.log.debug('Full tunnel command: %s', tunnel_cmd)
+            self._settings, extra_flags=extra_flags, extra_options=extra_options
+        )
+        self.log.debug("Full tunnel command: %s", tunnel_cmd)
         # Exec the ssh process directly so that when we deliver signals, we
         # deliver them straight to the child process.
         tunnel_proc = job.run_async(tunnel_cmd)
-        self.log.debug('Started ssh tunnel, local = %d remote = %d, pid = %d',
-                       local_port, port, tunnel_proc.pid)
+        self.log.debug(
+            "Started ssh tunnel, local = %d remote = %d, pid = %d",
+            local_port,
+            port,
+            tunnel_proc.pid,
+        )
         self._tunnels.append(_Tunnel(local_port, port, tunnel_proc))
         return local_port
 
@@ -400,8 +417,10 @@
         """
         # TODO: This may belong somewhere else: b/32572515
         user_host = self._formatter.format_host_name(self._settings)
-        job.run('scp %s %s:%s' % (local_path, user_host, remote_path),
-                ignore_status=ignore_status)
+        job.run(
+            "scp %s %s:%s" % (local_path, user_host, remote_path),
+            ignore_status=ignore_status,
+        )
 
     def pull_file(self, local_path, remote_path, ignore_status=False):
         """Send a file from remote host to local host
@@ -412,10 +431,12 @@
             ignore_status: Whether or not to ignore the command's exit_status.
         """
         user_host = self._formatter.format_host_name(self._settings)
-        job.run('scp %s:%s %s' % (user_host, remote_path, local_path),
-                ignore_status=ignore_status)
+        job.run(
+            "scp %s:%s %s" % (user_host, remote_path, local_path),
+            ignore_status=ignore_status,
+        )
 
-    def find_free_port(self, interface_name='localhost'):
+    def find_free_port(self, interface_name="localhost"):
         """Find a unused port on the remote host.
 
         Note that this method is inherently racy, since it is impossible
@@ -431,7 +452,7 @@
         # TODO: This may belong somewhere else: b/3257251
         free_port_cmd = (
             'python -c "import socket; s=socket.socket(); '
-            's.bind((\'%s\', 0)); print(s.getsockname()[1]); s.close()"'
+            "s.bind(('%s', 0)); print(s.getsockname()[1]); s.close()\""
         ) % interface_name
         port = int(self.run(free_port_cmd).stdout)
         # Yield to the os to ensure the port gets cleaned up.
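For readers less familiar with ssh multiplexing, the master-connection pattern assembled by setup_master_ssh and run above corresponds roughly to the sketch below. It uses Python's subprocess module instead of antlion's job helpers, and the host, user, and socket path are illustrative placeholders rather than values taken from this patch.

# Minimal sketch of the ControlMaster/ControlPath pattern used above.
# Placeholder host/user; not part of the antlion codebase.
import os
import subprocess
import tempfile
import time

host, user = "203.0.113.10", "fuchsia"  # placeholders
socket_path = os.path.join(tempfile.mkdtemp(prefix="ssh-master"), "socket")

# Master connection: -N runs no remote command; ControlMaster spawns the
# multiplexer; ControlPath is the shared socket (booleans render as yes/no).
master = subprocess.Popen(
    [
        "ssh",
        "-N",
        "-o", "ControlMaster=yes",
        "-o", f"ControlPath={socket_path}",
        "-o", "BatchMode=yes",
        f"{user}@{host}",
    ]
)

# Wait for the control socket to appear, as setup_master_ssh does.
deadline = time.time() + 5
while time.time() < deadline and not os.path.exists(socket_path):
    time.sleep(0.2)

# Later commands reuse the socket instead of re-authenticating.
result = subprocess.run(
    ["ssh", "-o", f"ControlPath={socket_path}", f"{user}@{host}", "echo connected"],
    capture_output=True,
    text=True,
    timeout=60,
)
print(result.stdout)
master.kill()
master.wait()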
diff --git a/src/antlion/controllers/utils_lib/ssh/formatter.py b/src/antlion/controllers/utils_lib/ssh/formatter.py
index 2466012..41450ca 100644
--- a/src/antlion/controllers/utils_lib/ssh/formatter.py
+++ b/src/antlion/controllers/utils_lib/ssh/formatter.py
@@ -43,7 +43,7 @@
         Returns:
             A string of the connection host name to connect to.
         """
-        return '%s@%s' % (settings.username, settings.hostname)
+        return "%s@%s" % (settings.username, settings.hostname)
 
     def format_value(self, value):
         """Formats a command line value.
@@ -58,7 +58,7 @@
             A string representation of the formatted value.
         """
         if isinstance(value, bool):
-            return 'yes' if value else 'no'
+            return "yes" if value else "no"
 
         return str(value)
 
@@ -77,8 +77,8 @@
         for option_name in options:
             option = options[option_name]
 
-            yield '-o'
-            yield '%s=%s' % (option_name, self.format_value(option))
+            yield "-o"
+            yield "%s=%s" % (option_name, self.format_value(option))
 
     def format_flag_list(self, flags):
         """Format the flags list.
@@ -99,10 +99,7 @@
             if flag is not None:
                 yield self.format_value(flag)
 
-    def format_ssh_local_command(self,
-                                 settings,
-                                 extra_flags={},
-                                 extra_options={}):
+    def format_ssh_local_command(self, settings, extra_flags={}, extra_options={}):
         """Formats the local part of the ssh command.
 
         Formats the local section of the ssh command. This is the part of the
@@ -136,11 +133,9 @@
 
         return base_command
 
-    def format_ssh_command(self,
-                           remote_command,
-                           settings,
-                           extra_flags={},
-                           extra_options={}):
+    def format_ssh_command(
+        self, remote_command, settings, extra_flags={}, extra_options={}
+    ):
         """Formats the full ssh command.
 
         Creates the full format for an ssh command.
@@ -155,8 +150,9 @@
         Returns:
             A list of strings that make up the total ssh command.
         """
-        local_command = self.format_ssh_local_command(settings, extra_flags,
-                                                      extra_options)
+        local_command = self.format_ssh_local_command(
+            settings, extra_flags, extra_options
+        )
 
         local_command.append(remote_command)
         return local_command
@@ -175,23 +171,18 @@
             machine.
         """
         if not env:
-            env_str = ''
+            env_str = ""
         else:
-            env_str = 'export '
+            env_str = "export "
             for name in env:
                 value = env[name]
-                env_str += '%s=%s ' % (name, str(value))
-            env_str += ';'
+                env_str += "%s=%s " % (name, str(value))
+            env_str += ";"
 
-        execution_line = '%s %s;' % (env_str, command)
+        execution_line = "%s %s;" % (env_str, command)
         return execution_line
 
-    def format_command(self,
-                       command,
-                       env,
-                       settings,
-                       extra_flags={},
-                       extra_options={}):
+    def format_command(self, command, env, settings, extra_flags={}, extra_options={}):
         """Formats a full command.
 
         Formats the full command to run in order to run a command on a remote
@@ -206,5 +197,6 @@
             extra_options: Extra options to include with the settings.
         """
         remote_command = self.format_remote_command(command, env)
-        return self.format_ssh_command(remote_command, settings, extra_flags,
-                                       extra_options)
+        return self.format_ssh_command(
+            remote_command, settings, extra_flags, extra_options
+        )
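As a rough illustration of what the formatter assembles, the sketch below feeds a minimal settings object through format_command. The SshFormatter class name and the exact ordering of the resulting list are assumptions based on the surrounding hunks, since the class header and part of format_ssh_local_command are not shown here.

# Illustrative only; output shape inferred from format_option_list,
# construct_ssh_flags/options, and format_remote_command above.
from antlion.controllers.utils_lib.ssh import formatter, settings

conn_settings = settings.SshSettings("203.0.113.10", "fuchsia")  # placeholder host/user
cmd = formatter.SshFormatter().format_command(
    "uname -a", env={"FOO": "bar"}, settings=conn_settings
)
# cmd is a list resembling (ordering may differ):
#   ['/usr/bin/ssh',
#    '-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null',
#    '-o', 'ConnectTimeout=30', '-o', 'ServerAliveInterval=300',
#    '-a', '-x', '-p', '22',
#    'fuchsia@203.0.113.10',
#    'export FOO=bar ; uname -a;']
print(cmd)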
diff --git a/src/antlion/controllers/utils_lib/ssh/settings.py b/src/antlion/controllers/utils_lib/ssh/settings.py
index ca14b91..ead5844 100644
--- a/src/antlion/controllers/utils_lib/ssh/settings.py
+++ b/src/antlion/controllers/utils_lib/ssh/settings.py
@@ -20,23 +20,7 @@
     An instance of SshSettings or None
 """
 
-
-def from_config(config):
-    if config is None:
-        return None  # Having no settings is not an error
-
-    user = config.get('user', None)
-    host = config.get('host', None)
-    port = config.get('port', 22)
-    identity_file = config.get('identity_file', None)
-    ssh_config = config.get('ssh_config', None)
-    connect_timeout = config.get('connect_timeout', 30)
-    if user is None or host is None:
-        raise ValueError('Malformed SSH config did not include user and '
-                         'host keys: %s' % config)
-
-    return SshSettings(host, user, port=port, identity_file=identity_file,
-                       ssh_config=ssh_config, connect_timeout=connect_timeout)
+from typing import Dict, Optional, Union
 
 
 class SshSettings(object):
@@ -56,16 +40,18 @@
                         connection alive.
     """
 
-    def __init__(self,
-                 hostname,
-                 username,
-                 port=22,
-                 host_file='/dev/null',
-                 connect_timeout=30,
-                 alive_interval=300,
-                 executable='/usr/bin/ssh',
-                 identity_file=None,
-                 ssh_config=None):
+    def __init__(
+        self,
+        hostname: str,
+        username: str,
+        port: int = 22,
+        host_file: str = "/dev/null",
+        connect_timeout: int = 30,
+        alive_interval: int = 300,
+        executable: str = "/usr/bin/ssh",
+        identity_file: Optional[str] = None,
+        ssh_config: Optional[str] = None,
+    ):
         self.username = username
         self.hostname = hostname
         self.executable = executable
@@ -76,7 +62,7 @@
         self.identity_file = identity_file
         self.ssh_config = ssh_config
 
-    def construct_ssh_options(self):
+    def construct_ssh_options(self) -> Dict[str, Union[str, int, bool]]:
         """Construct the ssh options.
 
         Constructs a dictionary of options that should be used with the ssh
@@ -86,27 +72,54 @@
             A dictionary of option name to value.
         """
         current_options = {}
-        current_options['StrictHostKeyChecking'] = False
-        current_options['UserKnownHostsFile'] = self.host_file
-        current_options['ConnectTimeout'] = self.connect_timeout
-        current_options['ServerAliveInterval'] = self.alive_interval
+        current_options["StrictHostKeyChecking"] = False
+        current_options["UserKnownHostsFile"] = self.host_file
+        current_options["ConnectTimeout"] = self.connect_timeout
+        current_options["ServerAliveInterval"] = self.alive_interval
         return current_options
 
-    def construct_ssh_flags(self):
+    def construct_ssh_flags(self) -> Dict[str, Union[None, str, int]]:
         """Construct the ssh flags.
 
         Constructs what flags should be used in the ssh connection.
 
         Returns:
-            A dictonary of flag name to value. If value is none then it is
+            A dictionary of flag name to value. If the value is None, it is
             treated as a binary flag.
         """
         current_flags = {}
-        current_flags['-a'] = None
-        current_flags['-x'] = None
-        current_flags['-p'] = self.port
+        current_flags["-a"] = None
+        current_flags["-x"] = None
+        current_flags["-p"] = self.port
         if self.identity_file:
-            current_flags['-i'] = self.identity_file
+            current_flags["-i"] = self.identity_file
         if self.ssh_config:
-            current_flags['-F'] = self.ssh_config
+            current_flags["-F"] = self.ssh_config
         return current_flags
+
+
+def from_config(config: Optional[Dict[str, Union[str, int]]]) -> Optional[SshSettings]:
+    if config is None:
+        return None  # Having no settings is not an error
+
+    ssh_binary_path = config.get("ssh_binary_path", "/usr/bin/ssh")
+    user = config.get("user", None)
+    host = config.get("host", None)
+    port = config.get("port", 22)
+    identity_file = config.get("identity_file", None)
+    ssh_config = config.get("ssh_config", None)
+    connect_timeout = config.get("connect_timeout", 30)
+    if user is None or host is None:
+        raise ValueError(
+            f"Malformed SSH config did not include user and host keys: {config}"
+        )
+
+    return SshSettings(
+        host,
+        user,
+        port=port,
+        identity_file=identity_file,
+        ssh_config=ssh_config,
+        connect_timeout=connect_timeout,
+        executable=ssh_binary_path,
+    )
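For context, the dictionary shape that from_config accepts mirrors the keys read above; the values below are placeholders, not a real testbed configuration.

# Illustrative config; keys match the config.get() calls in from_config.
from antlion.controllers.utils_lib.ssh import settings

ssh_settings = settings.from_config(
    {
        "user": "fuchsia",                      # required
        "host": "203.0.113.10",                 # required
        "port": 22,                             # optional, defaults to 22
        "identity_file": "~/.ssh/testbed_key",  # optional
        "ssh_binary_path": "/usr/bin/ssh",      # optional, defaults to /usr/bin/ssh
        "connect_timeout": 30,                  # optional, defaults to 30
    }
)
assert ssh_settings is not None
print(ssh_settings.construct_ssh_flags())
# -> {'-a': None, '-x': None, '-p': 22, '-i': '~/.ssh/testbed_key'}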
diff --git a/src/antlion/controllers/uxm_lib/OWNERS b/src/antlion/controllers/uxm_lib/OWNERS
deleted file mode 100644
index 0c40622..0000000
--- a/src/antlion/controllers/uxm_lib/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-jethier@google.com
-hmtuan@google.com
-harjani@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py b/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py
deleted file mode 100644
index 713d3cf..0000000
--- a/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py
+++ /dev/null
@@ -1,707 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import socket
-import time
-import paramiko
-import re
-
-from antlion.controllers.cellular_simulator import AbstractCellularSimulator
-
-
-class UXMCellularSimulator(AbstractCellularSimulator):
-    """A cellular simulator for UXM callbox."""
-
-    # Keys to obtain data from cell_info dictionary.
-    KEY_CELL_NUMBER = "cell_number"
-    KEY_CELL_TYPE = "cell_type"
-
-    # UXM socket port
-    UXM_PORT = 5125
-
-    # UXM SCPI COMMAND
-    SCPI_IMPORT_STATUS_QUERY_CMD = 'SYSTem:SCPI:IMPort:STATus?'
-    SCPI_SYSTEM_ERROR_CHECK_CMD = 'SYST:ERR?\n'
-    # require: path to SCPI file
-    SCPI_IMPORT_SCPI_FILE_CMD = 'SYSTem:SCPI:IMPort "{}"\n'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_CELL_ON_CMD = 'BSE:CONFig:{}:{}:ACTive 1'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_CELL_OFF_CMD = 'BSE:CONFig:{}:{}:ACTive 0'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_GET_CELL_STATUS = 'BSE:STATus:{}:{}?'
-    SCPI_CHECK_CONNECTION_CMD = '*IDN?\n'
-
-    # UXM's Test Application recovery
-    TA_BOOT_TIME = 100
-
-    # shh command
-    SSH_START_GUI_APP_CMD_FORMAT = 'psexec -s -d -i 1 "{exe_path}"'
-    SSH_CHECK_APP_RUNNING_CMD_FORMAT = 'tasklist | findstr /R {regex_app_name}'
-
-    # start process success regex
-    PSEXEC_PROC_STARTED_REGEX_FORMAT = 'started on * with process ID {proc_id}'
-
-    def __init__(self, ip_address, custom_files, uxm_user,
-                 ssh_private_key_to_uxm, ta_exe_path, ta_exe_name):
-        """Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of host where Keysight Test Application (TA)
-                is installed.
-            custom_files: a list of file path for custom files.
-            uxm_user: username of host where Keysight TA resides.
-            ssh_private_key_to_uxm: private key for key based ssh to
-                host where Keysight TA resides.
-            ta_exe_path: path to TA exe.
-            ta_exe_name: name of TA exe.
-        """
-        super().__init__()
-        self.custom_files = custom_files
-        self.rockbottom_script = None
-        self.cells = []
-        self.uxm_ip = ip_address
-        self.uxm_user = uxm_user
-        self.ssh_private_key_to_uxm = ssh_private_key_to_uxm
-        self.ta_exe_path = ta_exe_path
-        self.ta_exe_name = ta_exe_name
-        self.ssh_client = self._create_ssh_client()
-
-        # get roclbottom file
-        for file in self.custom_files:
-            if 'rockbottom_' in file:
-                self.rockbottom_script = file
-
-        # connect to Keysight Test Application via socket
-        self.recovery_ta()
-        self.socket = self._socket_connect(self.uxm_ip, self.UXM_PORT)
-        self.check_socket_connection()
-        self.timeout = 120
-
-    def _create_ssh_client(self):
-        """Create a ssh client to host."""
-        ssh = paramiko.SSHClient()
-        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        mykey = paramiko.Ed25519Key.from_private_key_file(
-            self.ssh_private_key_to_uxm)
-        ssh.connect(hostname=self.uxm_ip, username=self.uxm_user, pkey=mykey)
-        self.log.info('SSH client to %s is connected' % self.uxm_ip)
-        return ssh
-
-    def is_ta_running(self):
-        is_running_cmd = self.SSH_CHECK_APP_RUNNING_CMD_FORMAT.format(
-            regex_app_name=self.ta_exe_name)
-        stdin, stdout, stderr = self.ssh_client.exec_command(is_running_cmd)
-        stdin.close()
-        err = ''.join(stderr.readlines())
-        out = ''.join(stdout.readlines())
-        final_output = str(out) + str(err)
-        self.log.info(final_output)
-        return (out != '' and err == '')
-
-    def _start_test_app(self):
-        """Start Test Application on Windows."""
-        # start GUI exe via ssh
-        start_app_cmd = self.SSH_START_GUI_APP_CMD_FORMAT.format(
-            exe_path=self.ta_exe_path)
-        stdin, stdout, stderr = self.ssh_client.exec_command(start_app_cmd)
-        self.log.info(f'Command sent to {self.uxm_ip}: {start_app_cmd}')
-        stdin.close()
-        err = ''.join(stderr.readlines())
-        out = ''.join(stdout.readlines())
-        # psexec return process ID as part of the exit code
-        exit_status = stderr.channel.recv_exit_status()
-        is_started = re.search(
-            self.PSEXEC_PROC_STARTED_REGEX_FORMAT.format(proc_id=exit_status),
-            err[-1])
-        if is_started:
-            raise RuntimeError('Fail to start TA: ' + out + err)
-        # wait for ta completely boot up
-        self.log.info('TA is starting')
-        time.sleep(self.TA_BOOT_TIME)
-
-    def recovery_ta(self):
-        """Start TA if it is not running."""
-        if not self.is_ta_running():
-            self._start_test_app()
-            # checking if ta booting process complete
-            # by checking socket connection
-            s = None
-            retries = 12
-            for _ in range(retries):
-                try:
-                    s = self._socket_connect(self.uxm_ip, self.UXM_PORT)
-                    s.close()
-                    return
-                except ConnectionRefusedError as cre:
-                    self.log.info(
-                        'Connection refused, wait 10s for TA to boot')
-                    time.sleep(10)
-            raise RuntimeError('TA does not start on time')
-
-    def set_rockbottom_script_path(self, path):
-        """Set path to rockbottom script.
-
-        Args:
-            path: path to rockbottom script.
-        """
-        self.rockbottom_script = path
-
-    def set_cell_info(self, cell_info):
-        """Set type and number for multiple cells.
-
-        Args:
-            cell_info: list of dictionaries,
-                each dictionary contain cell type
-                and cell number for each cell
-                that the simulator need to control.
-        """
-        if not cell_info:
-            raise ValueError('Missing cell info from configurations file')
-        self.cells = cell_info
-
-    def turn_cell_on(self, cell_type, cell_number):
-        """Turn UXM's cell on.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if cell_type and cell_number:
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_ON_CMD.format(cell_type, cell_number))
-        else:
-            raise ValueError('Invalid cell info\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-    def turn_cell_off(self, cell_type, cell_number):
-        """Turn UXM's cell off.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if cell_type and cell_number:
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_OFF_CMD.format(cell_type, cell_number))
-        else:
-            raise ValueError('Invalid cell info\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-    def get_cell_status(self, cell_type, cell_number):
-        """Get status of cell.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if not cell_type or not cell_number:
-            raise ValueError('Invalid cell with\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-        return self._socket_send_SCPI_for_result_command(
-            self.SCPI_GET_CELL_STATUS.format(cell_type, cell_number))
-
-    def check_socket_connection(self):
-        """Check if the socket connection is established.
-
-        Query the identification of the Keysight Test Application
-        we are trying to connect to. Empty response indicates
-        connection fail, and vice versa.
-        """
-        self.socket.sendall(self.SCPI_CHECK_CONNECTION_CMD.encode())
-        response = self.socket.recv(1024).decode()
-        if response:
-            self.log.info(f'Connected to: {response}')
-        else:
-            self.log.error('Fail to connect to callbox')
-
-    def _socket_connect(self, host, port):
-        """Create socket connection.
-
-        Args:
-            host: IP address of desktop where Keysight Test Application resides.
-            port: port that Keysight Test Application is listening for socket
-                communication.
-        Return:
-            s: socket object.
-        """
-        self.log.info('Establishing connection to callbox via socket')
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.connect((host, port))
-        return s
-
-    def _socket_send_SCPI_command(self, command):
-        """Send SCPI command without expecting response.
-
-        Args:
-            command: a string SCPI command.
-        """
-        # make sure there is a line break for the socket to send command
-        command = command + '\n'
-        # send command
-        self.socket.sendall(command.encode())
-        self.log.info(f'Sent {command}')
-
-    def _socket_receive_SCPI_result(self):
-        """Receive response from socket. """
-        i = 1
-        response = ''
-        while i < self.timeout and not response:
-            response = self.socket.recv(1024).decode()
-            i += 1
-        return response
-
-    def _socket_send_SCPI_for_result_command(self, command):
-        """Send SCPI command and expecting response.
-
-        Args:
-            command: a string SCPI command.
-        """
-        self._socket_send_SCPI_command(command)
-        response = self._socket_receive_SCPI_result()
-        return response
-
-    def check_system_error(self):
-        """Query system error from Keysight Test Application.
-
-        Return:
-            status: a message indicate the number of errors
-                and detail of errors if any.
-                a string `0,"No error"` indicates no error.
-        """
-        status = self._socket_send_SCPI_for_result_command(
-            self.SCPI_SYSTEM_ERROR_CHECK_CMD)
-        self.log.info(f'System error status: {status}')
-        return status
-
-    def import_configuration(self, path):
-        """Import SCPI config file.
-
-        Args:
-            path: path to SCPI file.
-        """
-        self._socket_send_SCPI_command(
-            self.SCPI_IMPORT_SCPI_FILE_CMD.format(path))
-        time.sleep(45)
-
-    def destroy(self):
-        """Close socket connection with UXM. """
-        self.socket.close()
-
-    def setup_lte_scenario(self, path):
-        """Configures the equipment for an LTE simulation.
-
-        Args:
-            path: path to SCPI config file.
-        """
-        self.import_configuration(path)
-
-    def dut_rockbottom(self, dut):
-        """Set the dut to rockbottom state.
-
-        Args:
-            dut: a CellularAndroid controller.
-        """
-        # The rockbottom script might include a device reboot, so it is
-        # necessary to stop SL4A during its execution.
-        dut.ad.stop_services()
-        self.log.info('Executing rockbottom script for ' + dut.ad.model)
-        os.chmod(self.rockbottom_script, 0o777)
-        os.system('{} {}'.format(self.rockbottom_script, dut.ad.serial))
-        # Make sure the DUT is in root mode after coming back
-        dut.ad.root_adb()
-        # Restart SL4A
-        dut.ad.start_services()
-
-    def wait_until_attached_one_cell(self,
-                                     cell_type,
-                                     cell_number,
-                                     dut,
-                                     wait_for_camp_interval,
-                                     attach_retries,
-                                     change_dut_setting_allow=True):
-        """Wait until connect to given UXM cell.
-
-        After turn off airplane mode, sleep for
-        wait_for_camp_interval seconds for device to camp.
-        If not device is not connected after the wait,
-        either toggle airplane mode on/off or reboot device.
-        Args:
-            cell_type: type of cell
-                which we are trying to connect to.
-            cell_number: ordinal number of a cell
-                which we are trying to connect to.
-            dut: a CellularAndroid controller.
-            wait_for_camp_interval: sleep interval,
-                wait for device to camp.
-            attach_retries: number of retry
-                to wait for device
-                to connect to 1 basestation.
-            change_dut_setting_allow: turn on/off APM
-                or reboot device helps with device camp time.
-                However, if we are trying to connect to second cell
-                changing APM status or reboot is not allowed.
-        Raise:
-            AbstractCellularSimulator.CellularSimulatorError:
-                device unable to connect to cell.
-        """
-        # airplane mode off
-        # dut.ad.adb.shell('settings put secure adaptive_connectivity_enabled 0')
-        dut.toggle_airplane_mode(False)
-        time.sleep(5)
-        # turn cell on
-        self.turn_cell_on(cell_type, cell_number)
-        time.sleep(5)
-
-        # waits for connect
-        for index in range(1, attach_retries):
-            # airplane mode on
-            time.sleep(wait_for_camp_interval)
-            cell_state = self.get_cell_status(cell_type, cell_number)
-            self.log.info(f'cell state: {cell_state}')
-            if cell_state == 'CONN\n':
-                return True
-            if cell_state == 'OFF\n':
-                self.turn_cell_on(cell_type, cell_number)
-                time.sleep(5)
-            if change_dut_setting_allow:
-                if (index % 4) == 0:
-                    dut.ad.reboot()
-                    if self.rockbottom_script:
-                        self.dut_rockbottom(dut)
-                    else:
-                        self.log.warning(
-                            f'Rockbottom script {self} was not executed after reboot'
-                        )
-                else:
-                    # airplane mode on
-                    dut.toggle_airplane_mode(True)
-                    time.sleep(5)
-                    # airplane mode off
-                    dut.toggle_airplane_mode(False)
-
-        # Phone cannot connected to basestation of callbox
-        raise RuntimeError(
-            f'Phone was unable to connect to cell: {cell_type}-{cell_number}')
-
-    def wait_until_attached(self, dut, timeout, attach_retries):
-        """Waits until the DUT is attached to all required cells.
-
-        Args:
-            dut: a CellularAndroid controller.
-            timeout: sleep interval,
-                wait for device to camp in 1 try.
-            attach_retries: number of retry
-                to wait for device
-                to connect to 1 basestation.
-        """
-        # get cell info
-        first_cell_type = self.cells[0][self.KEY_CELL_TYPE]
-        first_cell_number = self.cells[0][self.KEY_CELL_NUMBER]
-        if len(self.cells) == 2:
-            second_cell_type = self.cells[1][self.KEY_CELL_TYPE]
-            second_cell_number = self.cells[1][self.KEY_CELL_NUMBER]
-
-        # connect to 1st cell
-        try:
-            self.wait_until_attached_one_cell(first_cell_type,
-                                              first_cell_number, dut, timeout,
-                                              attach_retries)
-        except Exception as exc:
-            raise RuntimeError(f'Cannot connect to first cell') from exc
-
-        # connect to 2nd cell
-        if len(self.cells) == 2:
-            self.turn_cell_on(
-                second_cell_type,
-                second_cell_number,
-            )
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL None')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:UL None')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL CELL1')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL CELL1')
-            time.sleep(1)
-            self._socket_send_SCPI_command(
-                "BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:APPly")
-            try:
-                self.wait_until_attached_one_cell(second_cell_type,
-                                                  second_cell_number, dut,
-                                                  timeout, attach_retries,
-                                                  False)
-            except Exception as exc:
-                raise RuntimeError(f'Cannot connect to second cell') from exc
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_band(self, bts_index, band):
-        """Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            band: the new band.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def get_duplex_mode(self, band):
-        """Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number.
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_input_power(self, bts_index, input_power):
-        """Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            input_power: the new input power.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_output_power(self, bts_index, output_power):
-        """Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            output_power: the new output power.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            tdd_config: the new tdd configuration number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """Sets the Special Sub-Frame config number for the indicated.
-
-        base station.
-
-        Args:
-            bts_index: the base station number.
-            ssf_config: the new ssf config number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            channel_number: the new channel number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            tmode: the new transmission mode.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """Determines what MCS table should be used for the downlink.
-
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number.
-            enabled: whether 256 QAM should be used.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """Determines what MCS table should be used for the uplink.
-
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number.
-            enabled: whether 64 QAM should be used.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            mac_padding: the new MAC padding setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_cfi(self, bts_index, cfi):
-        """Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            cfi: the new CFI setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            cycle_duration: the new paging cycle duration in milliseconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_phich_resource(self, bts_index, phich):
-        """Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            phich: the new PHICH resource setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """Activates the secondary carriers for CA.
-
-        Requires the DUT to be attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-              the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise
-                a CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        for cell in self.cells:
-            cell_type = cell[self.KEY_CELL_TYPE]
-            cell_number = cell[self.KEY_CELL_NUMBER]
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_OFF_CMD.format(cell_type, cell_number))
-
-    def stop(self):
-        """Stops current simulation.
-
-        After calling this method, the simulator will need to be set up again.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def start_data_traffic(self):
-        """Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def stop_data_traffic(self):
-        """Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
diff --git a/src/antlion/dict_object.py b/src/antlion/dict_object.py
index 0be7821..9e3288f 100644
--- a/src/antlion/dict_object.py
+++ b/src/antlion/dict_object.py
@@ -67,8 +67,7 @@
         if name in super(DictObject, self).keys():
             super(DictObject, self).__setitem__(name, value)
         else:
-            raise AttributeError("Class does not have attribute {}"
-                                 .format(value))
+            raise AttributeError("Class does not have attribute {}".format(name))
 
     @classmethod
     def from_dict(cls, dictionary):
diff --git a/src/antlion/error.py b/src/antlion/error.py
index 95969e5..bf69b7d 100644
--- a/src/antlion/error.py
+++ b/src/antlion/error.py
@@ -6,14 +6,13 @@
 
 class ActsError(TestError):
     """Base Acts Error"""
+
     def __init__(self, *args, **kwargs):
         class_name = self.__class__.__name__
         self.error_doc = self.__class__.__doc__
-        self.error_code = getattr(ActsErrorCode, class_name,
-                                  ActsErrorCode.UNKNOWN)
-        extras = dict(**kwargs, error_doc=self.error_doc,
-                      error_code=self.error_code)
-        details = args[0] if len(args) > 0 else ''
+        self.error_code = getattr(ActsErrorCode, class_name, ActsErrorCode.UNKNOWN)
+        extras = dict(**kwargs, error_doc=self.error_doc, error_code=self.error_code)
+        details = args[0] if len(args) > 0 else ""
         super().__init__(details, extras)
 
 
diff --git a/src/antlion/event/decorators.py b/src/antlion/event/decorators.py
index c3f7b83..b845dad 100644
--- a/src/antlion/event/decorators.py
+++ b/src/antlion/event/decorators.py
@@ -23,22 +23,20 @@
 
     This function must be registered manually.
     """
+
     class InnerSubscriptionHandle(StaticSubscriptionHandle):
         def __init__(self, func):
-            super().__init__(event_type, func,
-                             event_filter=event_filter,
-                             order=order)
+            super().__init__(event_type, func, event_filter=event_filter, order=order)
 
     return InnerSubscriptionHandle
 
 
 def subscribe(event_type, event_filter=None, order=0):
     """A decorator that subscribes an instance method."""
+
     class InnerSubscriptionHandle(InstanceSubscriptionHandle):
         def __init__(self, func):
-            super().__init__(event_type, func,
-                             event_filter=event_filter,
-                             order=order)
+            super().__init__(event_type, func, event_filter=event_filter, order=order)
 
     return InnerSubscriptionHandle
 
@@ -58,8 +56,7 @@
 
 
 def register_instance_subscriptions(obj):
-    """A decorator that subscribes all instance subscriptions after object init.
-    """
+    """A decorator that subscribes all instance subscriptions after object init."""
     old_init = obj.__init__
 
     def init_replacement(self, *args, **kwargs):
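A hedged usage sketch for the decorators in this file: the event type and handler are hypothetical, and, as the subscribe_static docstring notes, the resulting handle still has to be registered manually (register_subscription is the module-level helper used elsewhere in this patch).

# Hypothetical event type and handler, for illustration only.
from antlion.event import event_bus
from antlion.event.decorators import subscribe_static


class DeviceBootedEvent:
    pass


@subscribe_static(DeviceBootedEvent)
def on_device_booted(event):
    print("device booted")


# The decorator wraps the function in a StaticSubscriptionHandle; its
# subscription property (see subscription_handle.py below) is what gets
# registered with the bus.
event_bus.register_subscription(on_device_booted.subscription)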
diff --git a/src/antlion/event/event_bus.py b/src/antlion/event/event_bus.py
index 9c6a862..5488b80 100644
--- a/src/antlion/event/event_bus.py
+++ b/src/antlion/event/event_bus.py
@@ -58,9 +58,9 @@
         Returns:
             A registration ID.
         """
-        subscription = EventSubscription(event_type, func,
-                                         event_filter=filter_fn,
-                                         order=order)
+        subscription = EventSubscription(
+            event_type, func, event_filter=filter_fn, order=order
+        )
         return self.register_subscription(subscription)
 
     def register_subscriptions(self, subscriptions):
@@ -124,8 +124,9 @@
                 subscription.deliver(event)
             except Exception:
                 if ignore_errors:
-                    logging.exception('An exception occurred while handling '
-                                      'an event.')
+                    logging.exception(
+                        "An exception occurred while handling " "an event."
+                    )
                     continue
                 raise
 
@@ -146,20 +147,24 @@
         elif type(registration_id) is not int:
             raise ValueError(
                 'Subscription ID "%s" is not a valid ID. This value'
-                'must be an integer ID returned from subscribe().'
-                % registration_id)
+                "must be an integer ID returned from subscribe()." % registration_id
+            )
         else:
             # The value is a "valid" id, but is not subscribed. It's possible
             # another thread has unsubscribed this value.
-            logging.warning('Attempted to unsubscribe %s, but the matching '
-                            'subscription cannot be found.' % registration_id)
+            logging.warning(
+                "Attempted to unsubscribe %s, but the matching "
+                "subscription cannot be found." % registration_id
+            )
             return False
 
         event_type = subscription.event_type
         with self._subscription_lock:
             self._registration_id_map.pop(registration_id, None)
-            if (event_type in self._subscriptions and
-                    subscription in self._subscriptions[event_type]):
+            if (
+                event_type in self._subscriptions
+                and subscription in self._subscriptions[event_type]
+            ):
                 self._subscriptions[event_type].remove(subscription)
         return True
 
@@ -201,8 +206,7 @@
     Returns:
         A registration ID.
     """
-    return _event_bus.register(event_type, func, filter_fn=filter_fn,
-                               order=order)
+    return _event_bus.register(event_type, func, filter_fn=filter_fn, order=order)
 
 
 def register_subscriptions(subscriptions):
@@ -284,9 +288,9 @@
         self.registration_id = None
 
     def __enter__(self):
-        self.registration_id = _event_bus.register(self.event_type, self.func,
-                                                   filter_fn=self.filter_fn,
-                                                   order=self.order)
+        self.registration_id = _event_bus.register(
+            self.event_type, self.func, filter_fn=self.filter_fn, order=self.order
+        )
 
     def __exit__(self, *unused):
         _event_bus.unregister(self.registration_id)
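A minimal sketch of the module-level API touched in this hunk: register is shown above, and a matching module-level unregister is assumed to mirror the _event_bus.unregister call in __exit__. The event type, handler, and posting side are hypothetical.

# Hypothetical event type; delivery assumes something elsewhere posts
# BuildFinishedEvent instances onto the bus.
from antlion.event import event_bus


class BuildFinishedEvent:
    def __init__(self, status):
        self.status = status


def on_build_finished(event):
    print("build status:", event.status)


registration_id = event_bus.register(BuildFinishedEvent, on_build_finished, order=0)
# ... BuildFinishedEvent instances delivered through the bus now reach
# on_build_finished, subject to any filter_fn ...
event_bus.unregister(registration_id)  # assumed module-level counterpart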
diff --git a/src/antlion/event/event_subscription.py b/src/antlion/event/event_subscription.py
index e442507..ee8720c 100644
--- a/src/antlion/event/event_subscription.py
+++ b/src/antlion/event/event_subscription.py
@@ -25,6 +25,7 @@
                        to the subscribed function.
         order: The order value in which this subscription should be called.
     """
+
     def __init__(self, event_type, func, event_filter=None, order=0):
         self._event_type = event_type
         self._func = func
diff --git a/src/antlion/event/subscription_bundle.py b/src/antlion/event/subscription_bundle.py
index d936bd2..ac3cfb6 100644
--- a/src/antlion/event/subscription_bundle.py
+++ b/src/antlion/event/subscription_bundle.py
@@ -25,8 +25,7 @@
         """True if this SubscriptionBundle has been registered."""
         return self._registered
 
-    def add(self, event_type, func, event_filter=None,
-            order=0):
+    def add(self, event_type, func, event_filter=None, order=0):
         """Adds a new Subscription to this SubscriptionBundle.
 
         If this SubscriptionBundle is registered, the added Subscription will
@@ -35,9 +34,9 @@
         Returns:
             the EventSubscription object created.
         """
-        subscription = EventSubscription(event_type, func,
-                                         event_filter=event_filter,
-                                         order=order)
+        subscription = EventSubscription(
+            event_type, func, event_filter=event_filter, order=order
+        )
         return self.add_subscription(subscription)
 
     def add_subscription(self, subscription):
@@ -79,11 +78,14 @@
             self._registered = True
             for subscription, registration_id in self.subscriptions.items():
                 if registration_id is not None:
-                    logging.warning('Registered subscription found in '
-                                    'unregistered SubscriptionBundle: %s, %s' %
-                                    (subscription, registration_id))
-                self.subscriptions[subscription] = (
-                    event_bus.register_subscription(subscription))
+                    logging.warning(
+                        "Registered subscription found in "
+                        "unregistered SubscriptionBundle: %s, %s"
+                        % (subscription, registration_id)
+                    )
+                self.subscriptions[subscription] = event_bus.register_subscription(
+                    subscription
+                )
 
     def unregister(self):
         """Unregisters all subscriptions managed by this SubscriptionBundle."""
@@ -93,9 +95,11 @@
             self._registered = False
             for subscription, registration_id in self.subscriptions.items():
                 if registration_id is None:
-                    logging.warning('Unregistered subscription found in '
-                                    'registered SubscriptionBundle: %s, %s' %
-                                    (subscription, registration_id))
+                    logging.warning(
+                        "Unregistered subscription found in "
+                        "registered SubscriptionBundle: %s, %s"
+                        % (subscription, registration_id)
+                    )
                 event_bus.unregister(subscription)
                 self.subscriptions[subscription] = None
 
@@ -122,8 +126,7 @@
     Returns:
         An unregistered SubscriptionBundle.
     """
-    return _create_from_object(instance, instance.__class__,
-                               InstanceSubscriptionHandle)
+    return _create_from_object(instance, instance.__class__, InstanceSubscriptionHandle)
 
 
 def _create_from_object(obj, obj_to_search, subscription_handle_type):
diff --git a/src/antlion/event/subscription_handle.py b/src/antlion/event/subscription_handle.py
index 0123ab7..6aa9c3c 100644
--- a/src/antlion/event/subscription_handle.py
+++ b/src/antlion/event/subscription_handle.py
@@ -31,9 +31,12 @@
     def subscription(self):
         if self._subscription:
             return self._subscription
-        self._subscription = EventSubscription(self._event_type, self._func,
-                                               event_filter=self._event_filter,
-                                               order=self._order)
+        self._subscription = EventSubscription(
+            self._event_type,
+            self._func,
+            event_filter=self._event_filter,
+            order=self._order,
+        )
         return self._subscription
 
     def __get__(self, instance, owner):
@@ -44,8 +47,9 @@
 
         # Otherwise, we create a new SubscriptionHandle that will only be used
         # for the instance that owns this SubscriptionHandle.
-        ret = SubscriptionHandle(self._event_type, self._func,
-                                 self._event_filter, self._order)
+        ret = SubscriptionHandle(
+            self._event_type, self._func, self._event_filter, self._order
+        )
         ret._owner = instance
         ret._func = ret._wrap_call(ret._func)
         for attr, value in owner.__dict__.items():
@@ -60,6 +64,7 @@
                 return func(*args, **kwargs)
             else:
                 return func(self._owner, *args, **kwargs)
+
         return _wrapped_call
 
     def __call__(self, *args, **kwargs):
diff --git a/src/antlion/keys.py b/src/antlion/keys.py
index a89898c..b545d44 100644
--- a/src/antlion/keys.py
+++ b/src/antlion/keys.py
@@ -22,73 +22,55 @@
 
 
 class Config(enum.Enum):
-    """Enum values for test config related lookups.
-    """
+    """Enum values for test config related lookups."""
+
     # Keys used to look up values from test config files.
     # These keys define the wording of test configs and their internal
     # references.
-    key_log_path = 'logpath'
-    key_testbeds_under_test = 'testbeds_under_test'
-    key_testbed = 'testbed'
-    key_testbed_name = 'name'
+    key_log_path = "logpath"
+    key_testbeds_under_test = "testbeds_under_test"
+    key_testbed = "testbed"
+    key_testbed_name = "name"
     # configpath is the directory. key_config_full_path is the file path.
-    key_config_path = 'configpath'
-    key_config_full_path = 'config_full_path'
-    key_test_paths = 'testpaths'
-    key_port = 'Port'
-    key_address = 'Address'
-    key_test_case_iterations = 'test_case_iterations'
-    key_test_failure_tracebacks = 'test_failure_tracebacks'
+    key_config_path = "configpath"
+    key_config_full_path = "config_full_path"
+    key_test_paths = "testpaths"
+    key_port = "Port"
+    key_address = "Address"
+    key_test_case_iterations = "test_case_iterations"
+    key_test_failure_tracebacks = "test_failure_tracebacks"
     # Config names for controllers packaged in ACTS.
-    key_android_device = 'AndroidDevice'
-    key_bits = 'Bits'
-    key_bluetooth_pts_device = 'BluetoothPtsDevice'
-    key_fuchsia_device = 'FuchsiaDevice'
-    key_buds_device = 'BudsDevice'
-    key_chameleon_device = 'ChameleonDevice'
-    key_native_android_device = 'NativeAndroidDevice'
-    key_relay_device = 'RelayDevice'
-    key_access_point = 'AccessPoint'
-    key_attenuator = 'Attenuator'
-    key_iperf_server = 'IPerfServer'
-    key_iperf_client = 'IPerfClient'
-    key_packet_sender = 'PacketSender'
-    key_monsoon = 'Monsoon'
-    key_sniffer = 'Sniffer'
-    key_arduino_wifi_dongle = 'ArduinoWifiDongle'
-    key_packet_capture = 'PacketCapture'
-    key_pdu = 'PduDevice'
-    key_openwrt_ap = 'OpenWrtAP'
-    key_tigertail = 'Tigertail'
-    key_asus_axe11000_ap = 'AsusAXE11000AP'
+    key_access_point = "AccessPoint"
+    key_android_device = "AndroidDevice"
+    key_attenuator = "Attenuator"
+    key_bluetooth_pts_device = "BluetoothPtsDevice"
+    key_fuchsia_device = "FuchsiaDevice"
+    key_iperf_client = "IPerfClient"
+    key_iperf_server = "IPerfServer"
+    key_openwrt_ap = "OpenWrtAP"
+    key_packet_capture = "PacketCapture"
+    key_packet_sender = "PacketSender"
+    key_pdu = "PduDevice"
+    key_sniffer = "Sniffer"
     # Internal keys, used internally, not exposed to user's config files.
-    ikey_user_param = 'user_params'
-    ikey_testbed_name = 'testbed_name'
-    ikey_logger = 'log'
-    ikey_logpath = 'log_path'
-    ikey_summary_writer = 'summary_writer'
+    ikey_user_param = "user_params"
+    ikey_testbed_name = "testbed_name"
+    ikey_logger = "log"
+    ikey_logpath = "log_path"
+    ikey_summary_writer = "summary_writer"
     # module name of controllers packaged in ACTS.
-    m_key_bits = 'bits'
-    m_key_monsoon = 'monsoon'
-    m_key_android_device = 'android_device'
-    m_key_fuchsia_device = 'fuchsia_device'
-    m_key_bluetooth_pts_device = 'bluetooth_pts_device'
-    m_key_buds_device = 'buds_controller'
-    m_key_chameleon_device = 'chameleon_controller'
-    m_key_native_android_device = 'native_android_device'
-    m_key_relay_device = 'relay_device_controller'
-    m_key_access_point = 'access_point'
-    m_key_attenuator = 'attenuator'
-    m_key_iperf_server = 'iperf_server'
-    m_key_iperf_client = 'iperf_client'
-    m_key_packet_sender = 'packet_sender'
-    m_key_sniffer = 'sniffer'
-    m_key_arduino_wifi_dongle = 'arduino_wifi_dongle'
-    m_key_packet_capture = 'packet_capture'
-    m_key_pdu = 'pdu'
-    m_key_openwrt_ap = 'openwrt_ap'
-    m_key_tigertail = 'tigertail'
-    m_key_asus_axe11000_ap = 'asus_axe11000_ap'
+    m_key_access_point = "access_point"
+    m_key_android_device = "android_device"
+    m_key_attenuator = "attenuator"
+    m_key_bluetooth_pts_device = "bluetooth_pts_device"
+    m_key_fuchsia_device = "fuchsia_device"
+    m_key_iperf_client = "iperf_client"
+    m_key_iperf_server = "iperf_server"
+    m_key_openwrt_ap = "openwrt_ap"
+    m_key_packet_capture = "packet_capture"
+    m_key_packet_sender = "packet_sender"
+    m_key_pdu = "pdu"
+    m_key_sniffer = "sniffer"
 
     # A list of keys whose values in configs should not be passed to test
     # classes without unpacking first.
@@ -96,32 +78,20 @@
 
     # Controller names packaged with ACTS.
     builtin_controller_names = [
+        key_access_point,
         key_android_device,
-        key_bits,
+        key_attenuator,
         key_bluetooth_pts_device,
         key_fuchsia_device,
-        key_buds_device,
-        key_native_android_device,
-        key_relay_device,
-        key_access_point,
-        key_attenuator,
-        key_iperf_server,
         key_iperf_client,
-        key_packet_sender,
-        key_monsoon,
-        key_sniffer,
-        key_chameleon_device,
-        key_arduino_wifi_dongle,
-        key_packet_capture,
-        key_pdu,
+        key_iperf_server,
         key_openwrt_ap,
-        key_tigertail,
-        key_asus_axe11000_ap,
+        key_packet_capture,
+        key_packet_sender,
+        key_pdu,
+        key_sniffer,
     ]
 
-    # Keys that are file or folder paths.
-    file_path_keys = [key_relay_device]
-
 
 def get_name_by_value(value):
     for name, member in Config.__members__.items():
@@ -131,9 +101,8 @@
 
 
 def get_module_name(name_in_config):
-    """Translates the name of a controller in config file to its module name.
-    """
-    return value_to_value(name_in_config, 'm_%s')
+    """Translates the name of a controller in config file to its module name."""
+    return value_to_value(name_in_config, "m_%s")
 
 
 def value_to_value(ref_value, pattern):
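
The trimmed key_* and m_key_* lists above stay paired, and get_module_name() bridges them by formatting the enum member name with the m_%s pattern. A short sketch of the expected translation, using values from this hunk:

    from antlion import keys

    # key_fuchsia_device ("FuchsiaDevice") pairs with m_key_fuchsia_device ("fuchsia_device").
    assert keys.get_module_name("FuchsiaDevice") == "fuchsia_device"
    assert keys.get_module_name("AccessPoint") == "access_point"
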
diff --git a/src/antlion/libs/logging/log_stream.py b/src/antlion/libs/logging/log_stream.py
index b457e46..27aa077 100644
--- a/src/antlion/libs/logging/log_stream.py
+++ b/src/antlion/libs/logging/log_stream.py
@@ -87,7 +87,6 @@
     }
 # yapf: enable
 
-
 _log_streams = dict()
 _null_handler = logging.NullHandler()
 
@@ -101,9 +100,15 @@
 event_bus.register_subscription(_update_handlers.subscription)
 
 
-def create_logger(name, log_name=None, base_path='', subcontext='',
-                  log_styles=LogStyles.NONE, stream_format=None,
-                  file_format=None):
+def create_logger(
+    name,
+    log_name=None,
+    base_path="",
+    subcontext="",
+    log_styles=LogStyles.NONE,
+    stream_format=None,
+    file_format=None,
+):
     """Creates a Python Logger object with the given attributes.
 
     Creation through this method will automatically manage the logger in the
@@ -130,8 +135,9 @@
     """
     if name in _log_streams:
         _log_streams[name].cleanup()
-    log_stream = _LogStream(name, log_name, base_path, subcontext, log_styles,
-                            stream_format, file_format)
+    log_stream = _LogStream(
+        name, log_name, base_path, subcontext, log_styles, stream_format, file_format
+    )
     _set_logger(log_stream)
     return log_stream.logger
 
@@ -160,6 +166,7 @@
     """FileHandler implementation that allows the output file to be changed
     during operation.
     """
+
     def set_file(self, file_name):
         """Set the target output file to file_name.
 
@@ -179,6 +186,7 @@
     changed during operation. Rotated files will automatically adopt the newest
     output path.
     """
+
     set_file = MovableFileHandler.set_file
 
 
@@ -203,9 +211,16 @@
         file_format: Format used for log output to files
     """
 
-    def __init__(self, name, log_name=None, base_path='', subcontext='',
-                 log_styles=LogStyles.NONE, stream_format=None,
-                 file_format=None):
+    def __init__(
+        self,
+        name,
+        log_name=None,
+        base_path="",
+        subcontext="",
+        log_styles=LogStyles.NONE,
+        stream_format=None,
+        file_format=None,
+    ):
         """Creates a LogStream.
 
         Args:
@@ -235,8 +250,7 @@
         # Add a NullHandler to suppress unwanted console output
         self.logger.addHandler(_null_handler)
         self.logger.propagate = False
-        self.base_path = base_path or getattr(logging, 'log_path',
-                                              '/tmp/acts_logs')
+        self.base_path = base_path or getattr(logging, "log_path", "/tmp/acts_logs")
         self.subcontext = subcontext
         context.TestContext.add_base_output_path(self.logger.name, self.base_path)
         context.TestContext.add_subcontext(self.logger.name, self.subcontext)
@@ -273,8 +287,9 @@
         """
 
         def invalid_style_error(message):
-            raise InvalidStyleSetError('{LogStyle Set: %s} %s' %
-                                       (_log_styles_list, message))
+            raise InvalidStyleSetError(
+                "{LogStyle Set: %s} %s" % (_log_styles_list, message)
+            )
 
         # Store the log locations that have already been set per level.
         levels_dict = {}
@@ -288,39 +303,48 @@
                         if log_style & log_location:
                             if log_location & levels_dict[level]:
                                 invalid_style_error(
-                                    'The log location %s for log level %s has '
-                                    'been set multiple times' %
-                                    (log_location, level))
+                                    "The log location %s for log level %s has "
+                                    "been set multiple times" % (log_location, level)
+                                )
                             else:
                                 levels_dict[level] |= log_location
                     # Check that for a given log-level, not more than one
                     # of MONOLITH_LOG, TESTCLASS_LOG, TESTCASE_LOG is set.
                     locations = levels_dict[level] & LogStyles.ALL_FILE_LOGS
                     valid_locations = [
-                        LogStyles.TESTCASE_LOG, LogStyles.TESTCLASS_LOG,
-                        LogStyles.MONOLITH_LOG, LogStyles.NONE]
+                        LogStyles.TESTCASE_LOG,
+                        LogStyles.TESTCLASS_LOG,
+                        LogStyles.MONOLITH_LOG,
+                        LogStyles.NONE,
+                    ]
                     if locations not in valid_locations:
                         invalid_style_error(
-                            'More than one of MONOLITH_LOG, TESTCLASS_LOG, '
-                            'TESTCASE_LOG is set for log level %s.' % level)
+                            "More than one of MONOLITH_LOG, TESTCLASS_LOG, "
+                            "TESTCASE_LOG is set for log level %s." % level
+                        )
             if log_style & LogStyles.ALL_LEVELS == 0:
-                invalid_style_error('LogStyle %s needs to set a log '
-                                    'level.' % log_style)
+                invalid_style_error(
+                    "LogStyle %s needs to set a log " "level." % log_style
+                )
             if log_style & ~LogStyles.ALL_LEVELS == 0:
-                invalid_style_error('LogStyle %s needs to set a log '
-                                    'location.' % log_style)
+                invalid_style_error(
+                    "LogStyle %s needs to set a log " "location." % log_style
+                )
             if log_style & LogStyles.ROTATE_LOGS and not log_style & (
-                    LogStyles.MONOLITH_LOG | LogStyles.TESTCLASS_LOG |
-                    LogStyles.TESTCASE_LOG):
-                invalid_style_error('LogStyle %s has ROTATE_LOGS set, but does '
-                                    'not specify a log type.' % log_style)
+                LogStyles.MONOLITH_LOG
+                | LogStyles.TESTCLASS_LOG
+                | LogStyles.TESTCASE_LOG
+            ):
+                invalid_style_error(
+                    "LogStyle %s has ROTATE_LOGS set, but does "
+                    "not specify a log type." % log_style
+                )
 
     @staticmethod
     def __create_rotating_file_handler(filename):
         """Generates a callable to create an appropriate RotatingFileHandler."""
         # Magic number explanation: 10485760 == 10MB
-        return MovableRotatingFileHandler(filename, maxBytes=10485760,
-                                          backupCount=5)
+        return MovableRotatingFileHandler(filename, maxBytes=10485760, backupCount=5)
 
     @staticmethod
     def __get_file_handler_creator(log_style):
@@ -363,8 +387,9 @@
         Returns: A FileHandler
         """
         directory = self.__get_current_output_dir(
-            LogStyles.LOCATION_TO_CONTEXT_LEVEL[location])
-        base_name = '%s_%s.txt' % (self.name, LogStyles.LEVEL_NAMES[level])
+            LogStyles.LOCATION_TO_CONTEXT_LEVEL[location]
+        )
+        base_name = "%s_%s.txt" % (self.name, LogStyles.LEVEL_NAMES[level])
         handler = creator(os.path.join(directory, base_name))
         handler.setLevel(LogStyles.LEVEL_TO_NO[level])
         if self.file_format:
@@ -395,8 +420,7 @@
             if not (log_style & log_level and log_location):
                 continue
 
-            handler = self.__create_handler(
-                handler_creator, log_level, log_location)
+            handler = self.__create_handler(handler_creator, log_level, log_location)
             self.logger.addHandler(handler)
 
             if log_style & LogStyles.TESTCLASS_LOG:
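
For reference, a sketch of calling the reflowed create_logger(). The LOG_INFO level flag is an assumption (only ALL_LEVELS and the file-location flags appear in this hunk); the combination follows the validation rules above, which require exactly one level per style, at least one location, and a file log type whenever ROTATE_LOGS is set:

    from antlion.libs.logging import log_stream
    from antlion.libs.logging.log_stream import LogStyles

    # LogStyles.LOG_INFO is assumed here; TESTCASE_LOG appears in this file.
    # One level plus one location per style; duplicate locations for the same
    # level raise InvalidStyleSetError.
    logger = log_stream.create_logger(
        "wlan_policy",
        base_path="/tmp/acts_logs",
        log_styles=LogStyles.LOG_INFO | LogStyles.TESTCASE_LOG,
    )
    logger.info("written to a per-test-case file named after the stream and level")
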
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner.py b/src/antlion/libs/ota/ota_runners/ota_runner.py
index 4b20564..05dba4c 100644
--- a/src/antlion/libs/ota/ota_runners/ota_runner.py
+++ b/src/antlion/libs/ota/ota_runners/ota_runner.py
@@ -19,10 +19,8 @@
 
 """The setup time in seconds."""
 SL4A_SERVICE_SETUP_TIME = 5
-
-
 """The path to the metadata found within the OTA package."""
-OTA_PACKAGE_METADATA_PATH = 'META-INF/com/android/metadata'
+OTA_PACKAGE_METADATA_PATH = "META-INF/com/android/metadata"
 
 
 class OtaError(Exception):
@@ -44,40 +42,43 @@
     def _update(self):
         post_build_id = self.get_post_build_id()
         log = self.android_device.log
-        old_info = self.android_device.adb.getprop('ro.build.fingerprint')
-        log.info('Starting Update. Beginning build info: %s', old_info)
-        log.info('Stopping services.')
+        old_info = self.android_device.adb.getprop("ro.build.fingerprint")
+        log.info("Starting Update. Beginning build info: %s", old_info)
+        log.info("Stopping services.")
         self.android_device.stop_services()
-        log.info('Beginning tool.')
+        log.info("Beginning tool.")
         self.ota_tool.update(self)
-        log.info('Tool finished. Waiting for boot completion.')
+        log.info("Tool finished. Waiting for boot completion.")
         self.android_device.wait_for_boot_completion()
-        new_info = self.android_device.adb.getprop('ro.build.fingerprint')
+        new_info = self.android_device.adb.getprop("ro.build.fingerprint")
         if not old_info or old_info == new_info:
-            raise OtaError('The device was not updated to a new build. '
-                           'Previous build: %s. Current build: %s. '
-                           'Expected build: %s' % (old_info, new_info,
-                                                   post_build_id))
-        log.info('Boot completed. Rooting adb.')
+            raise OtaError(
+                "The device was not updated to a new build. "
+                "Previous build: %s. Current build: %s. "
+                "Expected build: %s" % (old_info, new_info, post_build_id)
+            )
+        log.info("Boot completed. Rooting adb.")
         self.android_device.root_adb()
-        log.info('Root complete.')
+        log.info("Root complete.")
         if self.android_device.skip_sl4a:
-            self.android_device.log.info('Skipping SL4A install.')
+            self.android_device.log.info("Skipping SL4A install.")
         else:
             for _ in range(3):
-                self.android_device.log.info('Re-installing SL4A from "%s".',
-                                             self.get_sl4a_apk())
+                self.android_device.log.info(
+                    'Re-installing SL4A from "%s".', self.get_sl4a_apk()
+                )
                 self.android_device.adb.install(
-                    '-r -g %s' % self.get_sl4a_apk(), ignore_status=True)
+                    "-r -g %s" % self.get_sl4a_apk(), ignore_status=True
+                )
                 time.sleep(SL4A_SERVICE_SETUP_TIME)
                 if self.android_device.is_sl4a_installed():
                     break
-        log.info('Starting services.')
+        log.info("Starting services.")
         self.android_device.start_services()
         self.android_device.update_sdk_api_level()
-        log.info('Services started. Running ota tool cleanup.')
+        log.info("Services started. Running ota tool cleanup.")
         self.ota_tool.cleanup(self)
-        log.info('Cleanup complete.')
+        log.info("Cleanup complete.")
 
     def get_ota_package_metadata(self, requested_field):
         """Returns a variable found within the OTA package's metadata.
@@ -87,15 +88,15 @@
 
         Will return None if the variable cannot be found.
         """
-        ota_zip = ZipFile(self.get_ota_package(), 'r')
+        ota_zip = ZipFile(self.get_ota_package(), "r")
         if OTA_PACKAGE_METADATA_PATH in ota_zip.namelist():
             with ota_zip.open(OTA_PACKAGE_METADATA_PATH) as metadata:
-                timestamp_line = requested_field.encode('utf-8')
+                timestamp_line = requested_field.encode("utf-8")
                 timestamp_offset = len(timestamp_line) + 1
 
                 for line in metadata.readlines():
                     if line.startswith(timestamp_line):
-                        return line[timestamp_offset:].decode('utf-8').strip()
+                        return line[timestamp_offset:].decode("utf-8").strip()
         return None
 
     def validate_update(self):
@@ -106,28 +107,36 @@
                 validated.
         """
         # The timestamp the current device build was created at.
-        cur_img_timestamp = self.android_device.adb.getprop('ro.build.date.utc')
-        ota_img_timestamp = self.get_ota_package_metadata('post-timestamp')
+        cur_img_timestamp = self.android_device.adb.getprop("ro.build.date.utc")
+        ota_img_timestamp = self.get_ota_package_metadata("post-timestamp")
 
         if ota_img_timestamp is None:
-            raise InvalidOtaUpdateError('Unable to find the timestamp '
-                                        'for the OTA build.')
+            raise InvalidOtaUpdateError(
+                "Unable to find the timestamp " "for the OTA build."
+            )
 
         try:
             if int(ota_img_timestamp) <= int(cur_img_timestamp):
                 cur_fingerprint = self.android_device.adb.getprop(
-                    'ro.bootimage.build.fingerprint')
+                    "ro.bootimage.build.fingerprint"
+                )
                 ota_fingerprint = self.get_post_build_id()
                 raise InvalidOtaUpdateError(
-                    'The OTA image comes from an earlier build than the '
-                    'source build. Current build: Time: %s -- %s, '
-                    'OTA build: Time: %s -- %s' %
-                    (cur_img_timestamp, cur_fingerprint,
-                     ota_img_timestamp, ota_fingerprint))
+                    "The OTA image comes from an earlier build than the "
+                    "source build. Current build: Time: %s -- %s, "
+                    "OTA build: Time: %s -- %s"
+                    % (
+                        cur_img_timestamp,
+                        cur_fingerprint,
+                        ota_img_timestamp,
+                        ota_fingerprint,
+                    )
+                )
         except ValueError:
             raise InvalidOtaUpdateError(
-                'Unable to parse timestamps. Current timestamp: %s, OTA '
-                'timestamp: %s' % (ota_img_timestamp, cur_img_timestamp))
+                "Unable to parse timestamps. Current timestamp: %s, OTA "
+                "timestamp: %s" % (ota_img_timestamp, cur_img_timestamp)
+            )
 
     def get_post_build_id(self):
         """Returns the post-build ID found within the OTA package metadata.
@@ -135,7 +144,7 @@
         Raises:
             InvalidOtaUpdateError if the post-build ID cannot be found.
         """
-        return self.get_ota_package_metadata('post-build')
+        return self.get_ota_package_metadata("post-build")
 
     def can_update(self):
         """Whether or not an update package is available for the device."""
@@ -168,8 +177,9 @@
     def update(self):
         """Starts the update process."""
         if not self.can_update():
-            raise OtaError('A SingleUseOtaTool instance cannot update a device '
-                           'multiple times.')
+            raise OtaError(
+                "A SingleUseOtaTool instance cannot update a device " "multiple times."
+            )
         self._called = True
         self._update()
 
@@ -201,8 +211,10 @@
     def update(self):
         """Starts the update process."""
         if not self.can_update():
-            raise OtaError('This MultiUseOtaRunner has already updated all '
-                           'given packages onto the phone.')
+            raise OtaError(
+                "This MultiUseOtaRunner has already updated all "
+                "given packages onto the phone."
+            )
         self._update()
         self.current_update_number += 1
 
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py b/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
index 311b045..a5622da 100644
--- a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
+++ b/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
@@ -24,7 +24,7 @@
 _bound_devices = {}
 
 DEFAULT_OTA_TOOL = adb_sideload_ota_tool.AdbSideloadOtaTool.__name__
-DEFAULT_OTA_COMMAND = 'adb'
+DEFAULT_OTA_COMMAND = "adb"
 
 
 def create_all_from_configs(config, android_devices):
@@ -63,17 +63,19 @@
     # Default to adb sideload
     try:
         ota_tool_class_name = get_ota_value_from_config(
-            config, 'ota_tool', android_device)
+            config, "ota_tool", android_device
+        )
     except ActsConfigError:
         ota_tool_class_name = DEFAULT_OTA_TOOL
 
     if ota_tool_class_name not in config:
         if ota_tool_class_name is not DEFAULT_OTA_TOOL:
             raise ActsConfigError(
-                'If the ota_tool is overloaded, the path to the tool must be '
+                "If the ota_tool is overloaded, the path to the tool must be "
                 'added to the ACTS config file under {"OtaToolName": '
-                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.' %
-                ota_tool_class_name)
+                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.'
+                % ota_tool_class_name
+            )
         else:
             command = DEFAULT_OTA_COMMAND
     else:
@@ -85,26 +87,28 @@
             else:
                 raise ActsConfigError(
                     'Config value for "%s" must be either a string or a list '
-                    'of exactly one element' % ota_tool_class_name)
+                    "of exactly one element" % ota_tool_class_name
+                )
 
-    ota_package = get_ota_value_from_config(config, 'ota_package',
-                                            android_device)
-    ota_sl4a = get_ota_value_from_config(config, 'ota_sl4a', android_device)
+    ota_package = get_ota_value_from_config(config, "ota_package", android_device)
+    ota_sl4a = get_ota_value_from_config(config, "ota_sl4a", android_device)
     if type(ota_sl4a) != type(ota_package):
         raise ActsConfigError(
-            'The ota_package and ota_sl4a must either both be strings, or '
-            'both be lists. Device with serial "%s" has mismatched types.' %
-            android_device.serial)
-    return create(ota_package, ota_sl4a, android_device, ota_tool_class_name,
-                  command)
+            "The ota_package and ota_sl4a must either both be strings, or "
+            'both be lists. Device with serial "%s" has mismatched types.'
+            % android_device.serial
+        )
+    return create(ota_package, ota_sl4a, android_device, ota_tool_class_name, command)
 
 
-def create(ota_package,
-           ota_sl4a,
-           android_device,
-           ota_tool_class_name=DEFAULT_OTA_TOOL,
-           command=DEFAULT_OTA_COMMAND,
-           use_cached_runners=True):
+def create(
+    ota_package,
+    ota_sl4a,
+    android_device,
+    ota_tool_class_name=DEFAULT_OTA_TOOL,
+    command=DEFAULT_OTA_COMMAND,
+    use_cached_runners=True,
+):
     """
     Args:
         ota_package: A string or list of strings corresponding to the
@@ -121,15 +125,14 @@
         An OtaRunner with the given properties from the arguments.
     """
     ota_tool = ota_tool_factory.create(ota_tool_class_name, command)
-    return create_from_package(ota_package, ota_sl4a, android_device, ota_tool,
-                               use_cached_runners)
+    return create_from_package(
+        ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners
+    )
 
 
-def create_from_package(ota_package,
-                        ota_sl4a,
-                        android_device,
-                        ota_tool,
-                        use_cached_runners=True):
+def create_from_package(
+    ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners=True
+):
     """
     Args:
         ota_package: A string or list of strings corresponding to the
@@ -145,25 +148,32 @@
         An OtaRunner with the given properties from the arguments.
     """
     if android_device in _bound_devices and use_cached_runners:
-        logging.warning('Android device %s has already been assigned an '
-                        'OtaRunner. Returning previously created runner.')
+        logging.warning(
+            "Android device %s has already been assigned an "
+            "OtaRunner. Returning previously created runner."
+        )
         return _bound_devices[android_device]
 
     if type(ota_package) != type(ota_sl4a):
         raise TypeError(
-            'The ota_package and ota_sl4a must either both be strings, or '
+            "The ota_package and ota_sl4a must either both be strings, or "
             'both be lists. Device with serial "%s" has requested mismatched '
-            'types.' % android_device.serial)
+            "types." % android_device.serial
+        )
 
     if type(ota_package) is str:
-        runner = ota_runner.SingleUseOtaRunner(ota_tool, android_device,
-                                               ota_package, ota_sl4a)
+        runner = ota_runner.SingleUseOtaRunner(
+            ota_tool, android_device, ota_package, ota_sl4a
+        )
     elif type(ota_package) is list:
-        runner = ota_runner.MultiUseOtaRunner(ota_tool, android_device,
-                                              ota_package, ota_sl4a)
+        runner = ota_runner.MultiUseOtaRunner(
+            ota_tool, android_device, ota_package, ota_sl4a
+        )
     else:
-        raise TypeError('The "ota_package" value in the acts config must be '
-                        'either a list or a string.')
+        raise TypeError(
+            'The "ota_package" value in the acts config must be '
+            "either a list or a string."
+        )
 
     _bound_devices[android_device] = runner
     return runner
@@ -180,25 +190,26 @@
     Returns: The value at the specified key.
     Throws: ActsConfigError if the value cannot be determined from the config.
     """
-    suffix = ''
-    if 'ota_map' in config:
-        if android_device.serial in config['ota_map']:
-            suffix = '_%s' % config['ota_map'][android_device.serial]
+    suffix = ""
+    if "ota_map" in config:
+        if android_device.serial in config["ota_map"]:
+            suffix = "_%s" % config["ota_map"][android_device.serial]
 
-    ota_package_key = '%s%s' % (key, suffix)
+    ota_package_key = "%s%s" % (key, suffix)
     if ota_package_key not in config:
-        if suffix != '':
+        if suffix != "":
             raise ActsConfigError(
-                'Asked for an OTA Update without specifying a required value. '
+                "Asked for an OTA Update without specifying a required value. "
                 '"ota_map" has entry {"%s": "%s"}, but there is no '
                 'corresponding entry {"%s":"/path/to/file"} found within the '
-                'ACTS config.' % (android_device.serial, suffix[1:],
-                                  ota_package_key))
+                "ACTS config." % (android_device.serial, suffix[1:], ota_package_key)
+            )
         else:
             raise ActsConfigError(
-                'Asked for an OTA Update without specifying a required value. '
+                "Asked for an OTA Update without specifying a required value. "
                 '"ota_map" does not exist or have a key for serial "%s", and '
                 'the default value entry "%s" cannot be found within the ACTS '
-                'config.' % (android_device.serial, ota_package_key))
+                "config." % (android_device.serial, ota_package_key)
+            )
 
     return config[ota_package_key]
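
To illustrate the suffix resolution above, a sketch with a placeholder serial and paths; the Mock stands in for an AndroidDevice, which only needs a serial attribute here:

    from unittest.mock import Mock

    from antlion.libs.ota.ota_runners import ota_runner_factory

    config = {
        "ota_map": {"SERIAL123": "pixel"},  # maps a device serial to a key suffix
        "ota_package_pixel": "/path/to/ota.zip",
        "ota_sl4a_pixel": "/path/to/sl4a.apk",
    }
    device = Mock(serial="SERIAL123")  # placeholder for an AndroidDevice

    # The suffix "_pixel" is appended to the requested key, so the suffixed
    # entries above are returned; with no ota_map entry, the bare key is used.
    assert (
        ota_runner_factory.get_ota_value_from_config(config, "ota_package", device)
        == "/path/to/ota.zip"
    )
    assert (
        ota_runner_factory.get_ota_value_from_config(config, "ota_sl4a", device)
        == "/path/to/sl4a.apk"
    )
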
diff --git a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
index 5b45241..f097f45 100644
--- a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
+++ b/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
@@ -32,17 +32,16 @@
         super(AdbSideloadOtaTool, self).__init__(ignored_command)
 
     def update(self, ota_runner):
-        logging.info('Rooting adb')
+        logging.info("Rooting adb")
         ota_runner.android_device.root_adb()
-        logging.info('Rebooting to sideload')
-        ota_runner.android_device.adb.reboot('sideload')
+        logging.info("Rebooting to sideload")
+        ota_runner.android_device.adb.reboot("sideload")
         ota_runner.android_device.adb.wait_for_sideload()
-        logging.info('Sideloading ota package')
+        logging.info("Sideloading ota package")
         package_path = ota_runner.get_ota_package()
         logging.info('Running adb sideload with package "%s"' % package_path)
-        ota_runner.android_device.adb.sideload(
-            package_path, timeout=PUSH_TIMEOUT)
-        logging.info('Sideload complete. Waiting for device to come back up.')
+        ota_runner.android_device.adb.sideload(package_path, timeout=PUSH_TIMEOUT)
+        logging.info("Sideload complete. Waiting for device to come back up.")
         ota_runner.android_device.adb.wait_for_recovery()
         ota_runner.android_device.reboot(stop_at_lock_screen=True)
-        logging.info('Device is up. Update complete.')
+        logging.info("Device is up. Update complete.")
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py b/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
index c889ddc..0eff707 100644
--- a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
+++ b/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
@@ -41,10 +41,12 @@
         return _constructed_tools[ota_tool_class]
 
     if ota_tool_class not in _CONSTRUCTORS:
-        raise KeyError('Given Ota Tool class name does not match a known '
-                       'name. Found "%s". Expected any of %s. If this tool '
-                       'does exist, add it to the _CONSTRUCTORS dict in this '
-                       'module.' % (ota_tool_class, _CONSTRUCTORS.keys()))
+        raise KeyError(
+            "Given Ota Tool class name does not match a known "
+            'name. Found "%s". Expected any of %s. If this tool '
+            "does exist, add it to the _CONSTRUCTORS dict in this "
+            "module." % (ota_tool_class, _CONSTRUCTORS.keys())
+        )
 
     new_update_tool = _CONSTRUCTORS[ota_tool_class](command)
     _constructed_tools[ota_tool_class] = new_update_tool
diff --git a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
index 03d10c5..4bdde99 100644
--- a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
+++ b/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
@@ -26,31 +26,35 @@
 # OTA Packages can be upwards of 1 GB. This may take some time to transfer over
 # USB 2.0. A/B devices must also complete the update in the background.
 UPDATE_TIMEOUT = 60 * 60
-UPDATE_LOCATION = '/data/ota_package/update.zip'
+UPDATE_LOCATION = "/data/ota_package/update.zip"
 
 
 class UpdateDeviceOtaTool(ota_tool.OtaTool):
     """Runs an OTA Update with system/update_engine/scripts/update_device.py."""
+
     def __init__(self, command):
         super(UpdateDeviceOtaTool, self).__init__(command)
 
         self.unzip_path = tempfile.mkdtemp()
         utils.unzip_maintain_permissions(self.command, self.unzip_path)
 
-        self.command = os.path.join(self.unzip_path, 'update_device.py')
+        self.command = os.path.join(self.unzip_path, "update_device.py")
 
     def update(self, ota_runner):
-        logging.info('Forcing adb to be in root mode.')
+        logging.info("Forcing adb to be in root mode.")
         ota_runner.android_device.root_adb()
-        update_command = 'python3 %s -s %s %s' % (
-            self.command, ota_runner.serial, ota_runner.get_ota_package())
-        logging.info('Running %s' % update_command)
+        update_command = "python3 %s -s %s %s" % (
+            self.command,
+            ota_runner.serial,
+            ota_runner.get_ota_package(),
+        )
+        logging.info("Running %s" % update_command)
         result = job.run(update_command, timeout=UPDATE_TIMEOUT)
-        logging.info('Output: %s' % result.stdout)
+        logging.info("Output: %s" % result.stdout)
 
-        logging.info('Rebooting device for update to go live.')
+        logging.info("Rebooting device for update to go live.")
         ota_runner.android_device.reboot(stop_at_lock_screen=True)
-        logging.info('Reboot sent.')
+        logging.info("Reboot sent.")
 
     def __del__(self):
         """Delete the unzipped update_device folder before ACTS exits."""
diff --git a/src/antlion/libs/ota/ota_updater.py b/src/antlion/libs/ota/ota_updater.py
index 1e434b3..6db9649 100644
--- a/src/antlion/libs/ota/ota_updater.py
+++ b/src/antlion/libs/ota/ota_updater.py
@@ -29,16 +29,17 @@
         android_devices: The android_devices in the test.
     """
     for ad in android_devices:
-        ota_runners[ad] = ota_runner_factory.create_from_configs(
-            user_params, ad)
+        ota_runners[ad] = ota_runner_factory.create_from_configs(user_params, ad)
 
 
 def _check_initialization(android_device):
     """Check if a given device was initialized."""
     if android_device not in ota_runners:
-        raise KeyError('Android Device with serial "%s" has not been '
-                       'initialized for OTA Updates. Did you forget to call'
-                       'ota_updater.initialize()?' % android_device.serial)
+        raise KeyError(
+            'Android Device with serial "%s" has not been '
+            "initialized for OTA Updates. Did you forget to call"
+            "ota_updater.initialize()?" % android_device.serial
+        )
 
 
 def update(android_device, ignore_update_errors=False):
@@ -60,8 +61,7 @@
         if ignore_update_errors:
             return
         android_device.log.error(e)
-        android_device.take_bug_report('ota_update',
-                                       utils.get_current_epoch_time())
+        android_device.take_bug_report("ota_update", utils.get_current_epoch_time())
         raise e
 
 
diff --git a/src/antlion/libs/proc/job.py b/src/antlion/libs/proc/job.py
index b17d904..c1cdc24 100644
--- a/src/antlion/libs/proc/job.py
+++ b/src/antlion/libs/proc/job.py
@@ -14,14 +14,9 @@
 
 import logging
 import os
-import sys
+import subprocess
 import time
 
-if os.name == 'posix' and sys.version_info[0] < 3:
-    import subprocess32 as subprocess
-else:
-    import subprocess
-
 
 class Error(Exception):
     """Indicates that a command failed, is fatal to the test unless caught."""
@@ -54,8 +49,9 @@
     def stdout(self):
         """String representation of standard output."""
         if not self._stdout_str:
-            self._stdout_str = self._raw_stdout.decode(encoding=self._encoding,
-                                                       errors='replace')
+            self._stdout_str = self._raw_stdout.decode(
+                encoding=self._encoding, errors="replace"
+            )
             self._stdout_str = self._stdout_str.strip()
         return self._stdout_str
 
@@ -63,19 +59,22 @@
     def stderr(self):
         """String representation of standard error."""
         if not self._stderr_str:
-            self._stderr_str = self._raw_stderr.decode(encoding=self._encoding,
-                                                       errors='replace')
+            self._stderr_str = self._raw_stderr.decode(
+                encoding=self._encoding, errors="replace"
+            )
             self._stderr_str = self._stderr_str.strip()
         return self._stderr_str
 
-    def __init__(self,
-                 command=[],
-                 stdout=bytes(),
-                 stderr=bytes(),
-                 exit_status=None,
-                 duration=0,
-                 did_timeout=False,
-                 encoding='utf-8'):
+    def __init__(
+        self,
+        command=[],
+        stdout=bytes(),
+        stderr=bytes(),
+        exit_status=None,
+        duration=0,
+        did_timeout=False,
+        encoding="utf-8",
+    ):
         """
         Args:
             command: The command that was run. This will be a list containing
@@ -98,18 +97,21 @@
         self.did_timeout = did_timeout
 
     def __repr__(self):
-        return ('job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, '
-                'duration=%r, did_timeout=%r, encoding=%r)') % (
-                    self.command, self._raw_stdout, self._raw_stderr,
-                    self.exit_status, self.duration, self.did_timeout,
-                    self._encoding)
+        return (
+            "job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, "
+            "duration=%r, did_timeout=%r, encoding=%r)"
+        ) % (
+            self.command,
+            self._raw_stdout,
+            self._raw_stderr,
+            self.exit_status,
+            self.duration,
+            self.did_timeout,
+            self._encoding,
+        )
 
 
-def run(command,
-        timeout=60,
-        ignore_status=False,
-        env=None,
-        io_encoding='utf-8'):
+def run(command, timeout=60, ignore_status=False, env=None, io_encoding="utf-8"):
     """Execute a command in a subproccess and return its output.
 
     Commands can be either shell commands (given as strings) or the
@@ -133,11 +135,13 @@
         Error: When the command had an error executing and ignore_status==False.
     """
     start_time = time.time()
-    proc = subprocess.Popen(command,
-                            env=env,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=not isinstance(command, list))
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=not isinstance(command, list),
+    )
     # Wait on the process terminating
     timed_out = False
     out = bytes()
@@ -149,18 +153,19 @@
         proc.kill()
         proc.wait()
 
-    result = Result(command=command,
-                    stdout=out,
-                    stderr=err,
-                    exit_status=proc.returncode,
-                    duration=time.time() - start_time,
-                    encoding=io_encoding,
-                    did_timeout=timed_out)
+    result = Result(
+        command=command,
+        stdout=out,
+        stderr=err,
+        exit_status=proc.returncode,
+        duration=time.time() - start_time,
+        encoding=io_encoding,
+        did_timeout=timed_out,
+    )
     logging.debug(result)
 
     if timed_out:
-        logging.error("Command %s with %s timeout setting timed out", command,
-                      timeout)
+        logging.error("Command %s with %s timeout setting timed out", command, timeout)
         raise TimeoutError(result)
 
     if not ignore_status and proc.returncode != 0:
@@ -187,11 +192,13 @@
         A subprocess.Popen object representing the created subprocess.
 
     """
-    proc = subprocess.Popen(command,
-                            env=env,
-                            preexec_fn=os.setpgrp,
-                            shell=not isinstance(command, list),
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.STDOUT)
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        preexec_fn=os.setpgrp,
+        shell=not isinstance(command, list),
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
     logging.debug("command %s started with pid %s", command, proc.pid)
     return proc
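
The reflowed run() keeps its behavior: list commands run without a shell, strings run through one, the decoded output is exposed on the returned Result, and a non-zero exit raises Error unless ignore_status=True. A quick sketch:

    from antlion.libs.proc import job

    # A list is executed directly (shell=False); a plain string would use the shell.
    result = job.run(["echo", "hello"], timeout=10)
    print(result.exit_status, result.stdout)  # 0 hello

    # Failing commands raise job.Error unless ignore_status=True.
    result = job.run("exit 3", ignore_status=True)
    assert result.exit_status == 3
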
diff --git a/src/antlion/libs/proc/process.py b/src/antlion/libs/proc/process.py
index 906be73..9a3bbcd 100644
--- a/src/antlion/libs/proc/process.py
+++ b/src/antlion/libs/proc/process.py
@@ -23,7 +23,7 @@
 import time
 from threading import Thread
 
-_on_windows = sys.platform == 'win32'
+_on_windows = sys.platform == "win32"
 
 
 class ProcessError(Exception):
@@ -54,23 +54,24 @@
         process, use Process.start().
         """
         # Split command string into list if shell=True is not specified
-        self._use_shell = kwargs.get('shell', False)
+        self._use_shell = kwargs.get("shell", False)
         if not self._use_shell and isinstance(command, str):
             command = shlex.split(command)
         self._command = command
         self._subprocess_kwargs = kwargs
         if _on_windows:
-            self._subprocess_kwargs['creationflags'] = (
-                subprocess.CREATE_NEW_PROCESS_GROUP)
+            self._subprocess_kwargs[
+                "creationflags"
+            ] = subprocess.CREATE_NEW_PROCESS_GROUP
         else:
-            self._subprocess_kwargs['start_new_session'] = True
+            self._subprocess_kwargs["start_new_session"] = True
         self._process = None
 
         self._listening_thread = None
         self._redirection_thread = None
         self._on_output_callback = lambda *args, **kw: None
         self._binary_output = False
-        self._on_terminate_callback = lambda *args, **kw: ''
+        self._on_terminate_callback = lambda *args, **kw: ""
 
         self._started = False
         self._stopped = False
@@ -117,7 +118,7 @@
     def start(self):
         """Starts the process's execution."""
         if self._started:
-            raise ProcessError('Process has already started.')
+            raise ProcessError("Process has already started.")
         self._started = True
         self._process = None
 
@@ -128,13 +129,13 @@
 
         while self._process is None:
             if time.time() > time_up_at:
-                raise OSError('Unable to open process!')
+                raise OSError("Unable to open process!")
 
         self._stopped = False
 
     @staticmethod
     def _get_timeout_left(timeout, start_time):
-        return max(.1, timeout - (time.time() - start_time))
+        return max(0.1, timeout - (time.time() - start_time))
 
     def is_running(self):
         """Checks that the underlying Popen process is still running
@@ -158,7 +159,7 @@
         """Kills the underlying process/process group. Implementation is
         platform-dependent."""
         if _on_windows:
-            subprocess.check_call('taskkill /F /T /PID %s' % self._process.pid)
+            subprocess.check_call("taskkill /F /T /PID %s" % self._process.pid)
         else:
             self.signal(signal.SIGKILL)
 
@@ -175,7 +176,7 @@
             kill_timeout: The amount of time to wait until killing the process.
         """
         if self._stopped:
-            raise ProcessError('Process is already being stopped.')
+            raise ProcessError("Process is already being stopped.")
         self._stopped = True
 
         try:
@@ -193,7 +194,7 @@
             sig: The signal to be sent.
         """
         if _on_windows:
-            raise ProcessError('Unable to call Process.signal on windows.')
+            raise ProcessError("Unable to call Process.signal on windows.")
 
         pgid = os.getpgid(self._process.pid)
         os.killpg(pgid, sig)
@@ -221,8 +222,7 @@
                     self._on_output_callback(data)
         else:
             while True:
-                line = self._process.stdout.readline().decode('utf-8',
-                                                              errors='replace')
+                line = self._process.stdout.readline().decode("utf-8", errors="replace")
 
                 if not line:
                     return
@@ -234,8 +234,7 @@
     def __start_process(command, **kwargs):
         """A convenient wrapper function for starting the process."""
         acts_logger = logging.getLogger()
-        acts_logger.debug(
-            'Starting command "%s" with kwargs %s', command, kwargs)
+        acts_logger.debug('Starting command "%s" with kwargs %s', command, kwargs)
         return subprocess.Popen(command, **kwargs)
 
     def _exec_loop(self):
@@ -249,27 +248,26 @@
         """
         command = self._command
         while True:
-            self._process = self.__start_process(command,
-                                                 stdout=subprocess.PIPE,
-                                                 stderr=subprocess.STDOUT,
-                                                 bufsize=1,
-                                                 **self._subprocess_kwargs)
+            self._process = self.__start_process(
+                command,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                bufsize=1,
+                **self._subprocess_kwargs,
+            )
             self._redirection_thread = Thread(target=self._redirect_output)
             self._redirection_thread.start()
             self._process.wait()
 
             if self._stopped:
-                logging.debug('The process for command %s was stopped.',
-                              command)
+                logging.debug("The process for command %s was stopped.", command)
                 break
             else:
-                logging.debug('The process for command %s terminated.',
-                              command)
+                logging.debug("The process for command %s terminated.", command)
                 # Wait for all output to be processed before sending
                 # _on_terminate_callback()
                 self._redirection_thread.join()
-                logging.debug('Beginning on_terminate_callback for %s.',
-                              command)
+                logging.debug("Beginning on_terminate_callback for %s.", command)
                 retry_value = self._on_terminate_callback(self._process)
                 if retry_value:
                     if not self._use_shell and isinstance(retry_value, str):
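
A sketch of driving the refactored Process wrapper; string commands are shlex-split when shell=True is not passed, and stop() waits up to its kill timeout before killing the process group, per the docstrings above:

    import time

    from antlion.libs.proc.process import Process

    proc = Process("ping 127.0.0.1")  # split with shlex since shell=True was not given
    proc.start()
    time.sleep(1)
    assert proc.is_running()
    proc.stop()  # stops the process; the wrapper kills the process group if it lingers
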
diff --git a/src/antlion/libs/test_binding/__init__.py b/src/antlion/libs/test_binding/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/test_binding/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/test_binding/all_tests_decorator.py b/src/antlion/libs/test_binding/all_tests_decorator.py
deleted file mode 100644
index 906ac6b..0000000
--- a/src/antlion/libs/test_binding/all_tests_decorator.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import inspect
-
-
-def for_all_tests(decorator):
-    """Applies a decorator to all tests within a test class.
-
-    Args:
-        decorator: The decorator to apply.
-
-    Returns:
-        The class decorator function.
-    """
-
-    def _decorate(decorated):
-        test_names = []
-        for name, value in inspect.getmembers(decorated,
-                                              predicate=inspect.isfunction):
-            if name.startswith("test_"):
-                test_names.append(name)
-
-        for test_name in test_names:
-            setattr(decorated, test_name,
-                    decorator(getattr(decorated, test_name)))
-
-        return decorated
-
-    return _decorate
diff --git a/src/antlion/libs/test_binding/binding.py b/src/antlion/libs/test_binding/binding.py
deleted file mode 100644
index df6387b..0000000
--- a/src/antlion/libs/test_binding/binding.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from antlion import signals
-
-
-class Binding(object):
-    """Creates a binding for a test method with a decorator.
-
-    Python stores all functions as a variable bound to an object. When that
-    object is called it will execute the function logic. It is possible to
-    create a wrapper object around the real function object to perform custom
-    logic and store additional meta-data.
-
-    This object acts as a wrapper for test functions. It allows binding
-    additional test logic to a test.
-    """
-
-    def __init__(self, inner, arg_modifier=None, before=None, after=None,
-                 signal_modifier=None, instance_args=None):
-        """
-        Args:
-            inner: The inner method or other binding being bound to.
-            arg_modifier: A function of
-                (*args, **kwargs) => args kwargs that will modify the
-                arguments to pass to the bound target
-            before: A function of (*args, **kwargs) => None that will
-                be called before the bound target.
-            after: A function of (result, *args, **kwargs) => None
-                that will be called after the bound target.
-            signal_modifier:  A function of
-                (signal, *args, **kwargs) => signal that will be
-                called before the signal is sent to modify the signal to send.
-        """
-        self.instance_args = instance_args or []
-        self.arg_modifier = arg_modifier
-        self.signal_modifier = signal_modifier
-        self.after = after
-        self.before = before
-        self.inner = inner
-        self.__name__ = inner.__name__
-
-    def __get__(self, instance, owner):
-        """Called when a new isntance of the test class is created.
-
-        When a new instance of a class is created all method bindings must
-        be bound as instance bindings. This transforms the function call
-        signature to be func(self, *args, **kwargs) to func(*args, **kwargs).
-        The newly created binding handles inserting the self variable so the
-        caller does not have to.
-
-        This binding needs to do similar logic by creating a new binding for
-        the instance that memorizes the instance as a passed in arg.
-        """
-        return Binding(self.inner,
-                       arg_modifier=self.arg_modifier,
-                       before=self.before,
-                       after=self.after,
-                       signal_modifier=self.signal_modifier,
-                       instance_args=[instance] + self.instance_args)
-
-    def __call__(self, *args, **kwargs):
-        """Called when the test is executed."""
-        full_args = self.instance_args + list(args)
-
-        try:
-            if self.arg_modifier:
-                full_args, kwargs = self.arg_modifier(self.inner, *full_args,
-                                                      **kwargs)
-
-            if self.before:
-                self.before(self.inner, *full_args, **kwargs)
-
-            result = 'UNKNOWN ERROR'
-            try:
-                result = self.inner(*full_args, **kwargs)
-            finally:
-                if self.after:
-                    self.after(self.inner, result, *full_args, **kwargs)
-
-            if result or result is None:
-                new_signal = signals.TestPass('')
-            else:
-                new_signal = signals.TestFailure('')
-        except signals.TestSignal as signal:
-            new_signal = signal
-
-        if self.signal_modifier:
-            new_signal = self.signal_modifier(self.inner, new_signal,
-                                              *full_args,
-                                              **kwargs)
-
-        raise new_signal
-
-    def __getattr__(self, item):
-        """A simple pass through for any variable we do not known about."""
-        return getattr(self.inner, item)
diff --git a/src/antlion/libs/uicd/__init__.py b/src/antlion/libs/uicd/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/uicd/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/uicd/uicd_cli.py b/src/antlion/libs/uicd/uicd_cli.py
deleted file mode 100644
index 3ca9bf4..0000000
--- a/src/antlion/libs/uicd/uicd_cli.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import shutil
-import tempfile
-
-from antlion import logger
-from antlion.libs.proc import job
-
-_UICD_JAR_CMD = 'java -jar %s/uicd-commandline.jar'
-_UNZIP_CMD = 'tar -xzf %s -C %s'
-
-
-class UicdError(Exception):
-    """Raised for exceptions that occur in UIConductor-related tasks"""
-
-
-class UicdCli(object):
-    """Provides an interface for running UIConductor (Uicd) workflows under its
-    CLI.
-
-    This class does not handle workflow creation, which requires the Uicd
-    frontend.
-    """
-    def __init__(self, uicd_zip, workflow_paths, log_path=None):
-        """Creates a UicdCli object. Extracts the required uicd-cli binaries.
-
-        Args:
-            uicd_zip: The path to uicd_cli.tar.gz
-            workflow_paths: List of paths to uicd workflows and/or directories
-                containing them.
-            log_path: Directory for storing logs generated by Uicd.
-        """
-        # This is done so unit tests can cache the mocked shutil.rmtree value
-        # and call it on __del__ when the patch has been lifted.
-        self._rm_tmpdir = shutil.rmtree
-
-        self._uicd_zip = uicd_zip[0] if isinstance(uicd_zip, list) else uicd_zip
-        self._uicd_path = tempfile.mkdtemp(prefix='uicd')
-        self._log_path = log_path
-        if self._log_path:
-            os.makedirs(self._log_path, exist_ok=True)
-        self._log = logger.create_tagged_trace_logger(tag='Uicd')
-        self._set_workflows(workflow_paths)
-        self._setup_cli()
-
-    def _set_workflows(self, workflow_paths):
-        """Set up a dictionary that maps workflow name to its file location.
-        This allows the user to specify workflows to run without having to
-        provide the full path.
-
-        Args:
-            workflow_paths: List of paths to uicd workflows and/or directories
-                containing them.
-
-        Raises:
-            UicdError if two or more Uicd workflows share the same file name
-        """
-        if isinstance(workflow_paths, str):
-            workflow_paths = [workflow_paths]
-
-        # get a list of workflow files from specified paths
-        def _raise(e):
-            raise e
-        workflow_files = []
-        for path in workflow_paths:
-            if os.path.isfile(path):
-                workflow_files.append(path)
-            else:
-                for (root, _, files) in os.walk(path, onerror=_raise):
-                    for file in files:
-                        workflow_files.append(os.path.join(root, file))
-
-        # populate the dictionary
-        self._workflows = {}
-        for path in workflow_files:
-            workflow_name = os.path.basename(path)
-            if workflow_name in self._workflows.keys():
-                raise UicdError('Uicd workflows may not share the same name.')
-            self._workflows[workflow_name] = path
-
-    def _setup_cli(self):
-        """Extract tar from uicd_zip and place unzipped files in uicd_path.
-
-        Raises:
-            Exception if the extraction fails.
-        """
-        self._log.debug('Extracting uicd-cli binaries from %s' % self._uicd_zip)
-        unzip_cmd = _UNZIP_CMD % (self._uicd_zip, self._uicd_path)
-        try:
-            job.run(unzip_cmd.split())
-        except job.Error:
-            self._log.exception('Failed to extract uicd-cli binaries.')
-            raise
-
-    def run(self, serial, workflows, timeout=120):
-        """Run specified workflows on the UIConductor CLI.
-
-        Args:
-            serial: Device serial
-            workflows: List or str of workflows to run.
-            timeout: Number of seconds to wait for the command to finish.
-        """
-        base_cmd = _UICD_JAR_CMD % self._uicd_path
-        if isinstance(workflows, str):
-            workflows = [workflows]
-        for workflow_name in workflows:
-            self._log.info('Running workflow "%s"' % workflow_name)
-            if workflow_name in self._workflows:
-                args = '-d %s -i %s' % (serial, self._workflows[workflow_name])
-            else:
-                self._log.error(
-                    'The workflow "%s" does not exist.' % workflow_name)
-                continue
-            if self._log_path:
-                args = '%s -o %s' % (args, self._log_path)
-            cmd = '%s %s' % (base_cmd, args)
-            try:
-                result = job.run(cmd.split(), timeout=timeout)
-            except job.Error:
-                self._log.exception(
-                    'Failed to run workflow "%s"' % workflow_name)
-                continue
-            if result.stdout:
-                stdout_split = result.stdout.splitlines()
-                if len(stdout_split) > 2:
-                    self._log.debug('Uicd logs stored at %s' % stdout_split[2])
-
-    def __del__(self):
-        """Delete the temp directory to Uicd CLI binaries upon ACTS exit."""
-        self._rm_tmpdir(self._uicd_path)
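For readers tracking this removal, the deleted UicdCli was a thin wrapper around uicd-commandline.jar; a minimal usage sketch, with hypothetical paths and device serial:

from antlion.libs.uicd.uicd_cli import UicdCli

# Hypothetical inputs for illustration only.
cli = UicdCli(
    uicd_zip="/tmp/uicd_cli.tar.gz",      # tarball containing uicd-commandline.jar
    workflow_paths=["/tmp/workflows"],    # files and/or directories of workflows
    log_path="/tmp/uicd_logs",
)
# Runs the named workflow against the device with the given serial.
cli.run(serial="ABC123", workflows="pair_device.json", timeout=120)
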
diff --git a/src/antlion/libs/utils/multithread.py b/src/antlion/libs/utils/multithread.py
deleted file mode 100644
index 31baaf7..0000000
--- a/src/antlion/libs/utils/multithread.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import concurrent.futures
-import logging
-
-def task_wrapper(task):
-    """Task wrapper for multithread_func
-
-    Args:
-        task[0]: function to be wrapped.
-        task[1]: function args.
-
-    Returns:
-        Return value of wrapped function call.
-    """
-    func = task[0]
-    params = task[1]
-    return func(*params)
-
-
-def run_multithread_func_async(log, task):
-    """Starts a multi-threaded function asynchronously.
-
-    Args:
-        log: log object.
-        task: a task to be executed in parallel.
-
-    Returns:
-        Future object representing the execution of the task.
-    """
-    executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
-    try:
-        future_object = executor.submit(task_wrapper, task)
-    except Exception as e:
-        log.error("Exception error %s", e)
-        raise
-    return future_object
-
-
-def run_multithread_func(log, tasks):
-    """Run multi-thread functions and return results.
-
-    Args:
-        log: log object.
-        tasks: a list of tasks to be executed in parallel.
-
-    Returns:
-        results for tasks.
-    """
-    MAX_NUMBER_OF_WORKERS = 10
-    number_of_workers = min(MAX_NUMBER_OF_WORKERS, len(tasks))
-    executor = concurrent.futures.ThreadPoolExecutor(
-        max_workers=number_of_workers)
-    if not log: log = logging
-    try:
-        results = list(executor.map(task_wrapper, tasks))
-    except Exception as e:
-        log.error("Exception error %s", e)
-        raise
-    executor.shutdown()
-    if log:
-        log.info("multithread_func %s result: %s",
-                 [task[0].__name__ for task in tasks], results)
-    return results
-
-
-def multithread_func(log, tasks):
-    """Multi-thread function wrapper.
-
-    Args:
-        log: log object.
-        tasks: tasks to be executed in parallel.
-
-    Returns:
-        True if all tasks return True.
-        False if any task returns False.
-    """
-    results = run_multithread_func(log, tasks)
-    for r in results:
-        if not r:
-            return False
-    return True
-
-
-def multithread_func_and_check_results(log, tasks, expected_results):
-    """Multi-thread function wrapper.
-
-    Args:
-        log: log object.
-        tasks: tasks to be executed in parallel.
-        expected_results: check if the results from tasks match expected_results.
-
-    Returns:
-        True if expected_results are met.
-        False if expected_results are not met.
-    """
-    return_value = True
-    results = run_multithread_func(log, tasks)
-    log.info("multithread_func result: %s, expecting %s", results,
-             expected_results)
-    for task, result, expected_result in zip(tasks, results, expected_results):
-        if result != expected_result:
-            logging.info("Result for task %s is %s, expecting %s", task[0],
-                         result, expected_result)
-            return_value = False
-    return return_value
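For context on callers being migrated off these helpers, tasks were (callable, args) tuples; a minimal sketch, with a hypothetical worker function:

import logging

from antlion.libs.utils.multithread import multithread_func

def _check_host(host, attempts):
    # Hypothetical worker; any callable returning a truthy value on success works.
    return attempts > 0 and bool(host)

tasks = [
    (_check_host, ("192.168.1.1", 3)),
    (_check_host, ("192.168.1.2", 3)),
]
# Runs both checks on a thread pool; True only if every task returned True.
all_passed = multithread_func(logging.getLogger(), tasks)
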
diff --git a/src/antlion/libs/utils/timer.py b/src/antlion/libs/utils/timer.py
deleted file mode 100644
index 2350aa9..0000000
--- a/src/antlion/libs/utils/timer.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""A simple timer class to keep record of the elapsed time."""
-
-import time
-
-
-class TimeRecorder(object):
-    """Main class to keep time records.
-
-    A timer record contains an ID, a start timestamp, and an optional stop
-    timestamp. The elapsed time is calculated as stop - start.
-    If the stop timestamp is not set, the current system time is used.
-
-    Example usage:
-    >>> timer = TimeRecorder()
-    >>> # start a single timer, ID = 'lunch'
-    >>> timer.start_timer('lunch')
-    >>> # start two timers at the same time
-    >>> timer.start_timer(['salad', 'dessert'])
-    >>> # stop a single timer
-    >>> timer.stop_timer('salad')
-    >>> # get elapsed time of all timers
-    >>> timer.elapsed()
-    """
-
-    def __init__(self):
-        self.recorder = dict()
-
-    def start_timer(self, record_ids='Default', force=False):
-        """Start one or more timer.
-
-        Starts one or more timer at current system time with the record ID
-        specified in record_ids. Will overwrite/restart existing timer.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will start multiple timers
-                        at the same time.
-            force: Force-update the timer's start time if the specified timer
-                   has already started. By default, a started timer is not
-                   updated again.
-
-        Returns:
-            Number of timers started.
-        """
-        if isinstance(record_ids, str):
-            record_ids = [record_ids]
-        start_time = time.time()
-        for rec in record_ids:
-            if force or rec not in self.recorder:
-                self.recorder[rec] = [start_time, None]
-        return len(record_ids)
-
-    def stop_timer(self, record_ids=None, force=False):
-        """Stop one or more timer.
-
-        Stops one or more timer at current system time.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will stop multiple timers at
-                        the same time. By default, it will stop all timers.
-            force: Force-update the timer's stop time if the specified timer has
-                   already stopped. By default, a stopped timer is not updated
-                   again.
-
-        Returns:
-            Number of timers stopped.
-        """
-        # stop all record if id is not provided.
-        if record_ids is None:
-            record_ids = self.recorder.keys()
-        elif isinstance(record_ids, str):
-            record_ids = [record_ids]
-        stop_time = time.time()
-        num_rec = 0
-        for rec in record_ids:
-            if rec in self.recorder:
-                if force or self.recorder[rec][1] is None:
-                    self.recorder[rec][1] = stop_time
-                    num_rec += 1
-        return num_rec
-
-    def elapsed(self, record_ids=None):
-        """Return elapsed time in seconds.
-
-        For records with no stop time, the elapsed time is calculated against
-        the current system time.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will compute the elapsed
-                        time for all specified timers. Default value (None)
-                        calculates elapsed time for all existing timers.
-
-        Returns:
-            The elapsed time. If the record_ids is a string, will return the
-            time in seconds as float type. If the record_ids is a list or
-            default (None), will return a dict of the <record id, elapsed time>.
-        """
-        single_record = False
-        if record_ids is None:
-            record_ids = self.recorder.keys()
-        elif isinstance(record_ids, str):
-            record_ids = [record_ids]
-            single_record = True
-        results = dict()
-        curr_time = time.time()
-        for rec in record_ids:
-            if rec in self.recorder:
-                if self.recorder[rec][1] is not None:
-                    results[rec] = self.recorder[rec][1] - self.recorder[rec][0]
-                else:
-                    results[rec] = curr_time - self.recorder[rec][0]
-        if not results:  # no valid record found
-            return None
-        elif single_record and len(record_ids) == 1:
-            # only 1 record is requested, return results directly
-            return results[record_ids[0]]
-        else:
-            return results  # multiple records, return a dict.
-
-    def clear(self, record_ids=None):
-        """Clear existing time records."""
-        if record_ids is None:
-            self.recorder = dict()
-            return
-
-        if isinstance(record_ids, str):
-            record_ids = [record_ids]
-        for rec in record_ids:
-            if rec in self.recorder:
-                del self.recorder[rec]
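The return type of elapsed() depends on how it is queried, as the docstring above describes; a short sketch of that behavior:

import time

from antlion.libs.utils.timer import TimeRecorder

timer = TimeRecorder()
timer.start_timer(["boot", "connect"])
time.sleep(0.1)
timer.stop_timer("boot")

# A single string ID returns a float; a list or None returns a dict of results.
boot_seconds = timer.elapsed("boot")
all_elapsed = timer.elapsed()  # {"boot": ..., "connect": ...}
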
diff --git a/src/antlion/libs/version_selector.py b/src/antlion/libs/version_selector.py
deleted file mode 100644
index 2e55eb5..0000000
--- a/src/antlion/libs/version_selector.py
+++ /dev/null
@@ -1,291 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import bisect
-from collections import namedtuple
-import inspect
-import numbers
-
-
-def _fully_qualified_name(func):
-    """Returns the fully-qualified name of a function.
-
-    Note: __qualname__ is not the fully qualified name. It is the fully
-          qualified name without the module name.
-
-    See: https://www.python.org/dev/peps/pep-3155/#naming-choice
-    """
-    return '%s:%s' % (func.__module__, func.__qualname__)
-
-
-_FrameInfo = namedtuple('_FrameInfo', ['frame', 'filename', 'lineno',
-                                       'function', 'code_context', 'index'])
-
-
-def _inspect_stack():
-    """Returns named tuple for each tuple returned by inspect.stack().
-
-    Needed for Python 3.4 and earlier, where inspect.stack() returns unnamed tuples.
-
-    Returns:
-        list of _FrameInfo named tuples representing stack frame info.
-    """
-    return [_FrameInfo(*info) for info in inspect.stack()]
-
-
-def set_version(get_version_func, min_version, max_version):
-    """Returns a decorator returning a VersionSelector containing all versions
-    of the decorated func.
-
-    Args:
-        get_version_func: The lambda that returns the version level based on the
-                          arguments sent to versioned_func
-        min_version: The minimum API level for calling versioned_func.
-        max_version: The maximum API level for calling versioned_func.
-
-    Raises:
-        SyntaxError if get_version_func is different between versioned funcs.
-
-    Returns:
-        A VersionSelector containing all versioned calls to the decorated func.
-    """
-    func_owner_variables = None
-    for frame_info in _inspect_stack():
-        if frame_info.function == '<module>':
-            # We've reached the end of the most recently imported module in our
-            # stack without finding a class first. This indicates that the
-            # decorator is on a module-level function.
-            func_owner_variables = frame_info.frame.f_locals
-            break
-        elif '__qualname__' in frame_info.frame.f_locals:
-            # __qualname__ appears in stack frames of objects that have
-            # yet to be interpreted. Here we can guarantee that the object in
-            # question is the innermost class that contains the function.
-            func_owner_variables = frame_info.frame.f_locals
-            break
-
-    def decorator(func):
-        if isinstance(func, (staticmethod, classmethod)):
-            raise SyntaxError('@staticmethod and @classmethod decorators must '
-                              'be placed before the versioning decorator.')
-        func_name = func.__name__
-
-        if func_name in func_owner_variables:
-            # If the function already exists within the class/module, get it.
-            version_selector = func_owner_variables[func_name]
-            if isinstance(version_selector, (staticmethod, classmethod)):
-                # If the function was also decorated with @staticmethod or
-                # @classmethod, the version_selector will be stored in __func__.
-                version_selector = version_selector.__func__
-            if not isinstance(version_selector, _VersionSelector):
-                raise SyntaxError('The previously defined function "%s" is not '
-                                  'decorated with a versioning decorator.' %
-                                  version_selector.__qualname__)
-            if (version_selector.comparison_func_name !=
-                    _fully_qualified_name(get_version_func)):
-                raise SyntaxError('Functions of the same name must be decorated'
-                                  ' with the same versioning decorator.')
-        else:
-            version_selector = _VersionSelector(get_version_func)
-
-        version_selector.add_fn(func, min_version, max_version)
-        return version_selector
-
-    return decorator
-
-
-class _VersionSelector(object):
-    """A class that maps API levels to versioned functions for that API level.
-
-    Attributes:
-        entry_list: A sorted list of Entries that define which functions to call
-                    for a given API level.
-    """
-
-    class ListWrap(object):
-        """This class wraps a list of VersionSelector.Entry objects.
-
-        This is required to make the bisect functions work, since the underlying
-        implementation of those functions does not use __cmp__, __lt__, __gt__,
-        etc., as it is not implemented in Python.
-
-        See: https://docs.python.org/3/library/bisect.html#other-examples
-        """
-
-        def __init__(self, entry_list):
-            self.list = entry_list
-
-        def __len__(self):
-            return len(self.list)
-
-        def __getitem__(self, index):
-            return self.list[index].level
-
-    class Entry(object):
-        def __init__(self, level, func, direction):
-            """Creates an Entry object.
-
-            Args:
-                level: The API level for this point.
-                func: The function to call.
-                direction: (-1, 0, or 1) the direction the ray from this level
-                           points towards.
-            """
-            self.level = level
-            self.func = func
-            self.direction = direction
-
-    def __init__(self, version_func):
-        """Creates a VersionSelector object.
-
-        Args:
-            version_func: The function that converts the arguments into an
-                          integer that represents the API level.
-        """
-        self.entry_list = list()
-        self.get_version = version_func
-        self.instance = None
-        self.comparison_func_name = _fully_qualified_name(version_func)
-
-    def __name__(self):
-        if len(self.entry_list) > 0:
-            return self.entry_list[0].func.__name__
-        return '%s<%s>' % (self.__class__.__name__, self.get_version.__name__)
-
-    def print_ranges(self):
-        """Returns all ranges as a string.
-
-        The string is formatted as '[min_a, max_a], [min_b, max_b], ...'
-        """
-        ranges = []
-        min_boundary = None
-        for entry in self.entry_list:
-            if entry.direction == 1:
-                min_boundary = entry.level
-            elif entry.direction == 0:
-                ranges.append(str([entry.level, entry.level]))
-            else:
-                ranges.append(str([min_boundary, entry.level]))
-        return ', '.join(ranges)
-
-    def add_fn(self, fn, min_version, max_version):
-        """Adds a function to the VersionSelector for the given API range.
-
-        Args:
-            fn: The function to call when the API level is met.
-            min_version: The minimum version level for calling this function.
-            max_version: The maximum version level for calling this function.
-
-        Raises:
-            ValueError if min_version > max_version or another versioned
-                       function overlaps this new range.
-        """
-        if min_version > max_version:
-            raise ValueError('The minimum API level must not be greater than '
-                             'the maximum API level.')
-        insertion_index = bisect.bisect_left(
-            _VersionSelector.ListWrap(self.entry_list), min_version)
-        if insertion_index != len(self.entry_list):
-            right_neighbor = self.entry_list[insertion_index]
-            if not (min_version <= max_version < right_neighbor.level and
-                    right_neighbor.direction != -1):
-                raise ValueError('New range overlaps another API level. '
-                                 'New range: %s, Existing ranges: %s' %
-                                 ([min_version, max_version],
-                                  self.print_ranges()))
-        if min_version == max_version:
-            new_entry = _VersionSelector.Entry(min_version, fn, direction=0)
-            self.entry_list.insert(insertion_index, new_entry)
-        else:
-            # Inserts the 2 entries into the entry list at insertion_index.
-            self.entry_list[insertion_index:insertion_index] = [
-                _VersionSelector.Entry(min_version, fn, direction=1),
-                _VersionSelector.Entry(max_version, fn, direction=-1)]
-
-    def __call__(self, *args, **kwargs):
-        """Calls the proper versioned function for the given API level.
-
-        This is a magic python function that gets called whenever parentheses
-        immediately follow the attribute access (e.g. obj.version_selector()).
-
-        Args:
-            *args, **kwargs: The arguments passed into this call. These
-                             arguments are intended for the decorated function.
-
-        Returns:
-            The result of the called function.
-        """
-        if self.instance is not None:
-            # When the versioned function is a classmethod, the class is passed
-            # into __call__ as the first argument.
-            level = self.get_version(self.instance, *args, **kwargs)
-        else:
-            level = self.get_version(*args, **kwargs)
-        if not isinstance(level, numbers.Number):
-            kwargs_out = []
-            for key, value in kwargs.items():
-                kwargs_out.append('%s=%s' % (key, str(value)))
-            args_out = str(list(args))[1:-1]
-            kwargs_out = ', '.join(kwargs_out)
-            raise ValueError(
-                'The API level function %s returned %s for the arguments '
-                '(%s). This function must return a number.' %
-                (self.get_version.__qualname__, repr(level),
-                 ', '.join(i for i in [args_out, kwargs_out] if i)))
-
-        index = bisect.bisect_left(_VersionSelector.ListWrap(self.entry_list),
-                                   level)
-
-        # Check to make sure the function being called is within the API range
-        if index == len(self.entry_list):
-            raise NotImplementedError('No function %s exists for API level %s'
-                                      % (self.entry_list[0].func.__qualname__,
-                                         level))
-        closest_entry = self.entry_list[index]
-        if (closest_entry.direction == 0 and closest_entry.level != level or
-                closest_entry.direction == 1 and closest_entry.level > level or
-                closest_entry.direction == -1 and closest_entry.level < level):
-            raise NotImplementedError('No function %s exists for API level %s'
-                                      % (self.entry_list[0].func.__qualname__,
-                                         level))
-
-        func = self.entry_list[index].func
-        if self.instance is None:
-            # __get__ was not called, so the function is module-level.
-            return func(*args, **kwargs)
-
-        return func(self.instance, *args, **kwargs)
-
-    def __get__(self, instance, owner):
-        """Gets the instance and owner whenever this function is obtained.
-
-        These arguments will be used to pass in the self to instance methods.
-        If the function is marked with @staticmethod or @classmethod, those
-        decorators will handle removing self or getting the class, respectively.
-
-        Note that this function will NOT be called on module-level functions.
-
-        Args:
-            instance: The instance of the object this function is being called
-                      from. If this function is static or a classmethod,
-                      instance will be None.
-            owner: The object that owns this function. This is the class object
-                   that defines the function.
-
-        Returns:
-            self, this VersionSelector instance.
-        """
-        self.instance = instance
-        return self
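A minimal sketch of how set_version was applied to a pair of versioned methods; the Dut class and _api_level helper below are hypothetical:

from antlion.libs.version_selector import set_version

def _api_level(dut, *args, **kwargs):
    # Hypothetical: derive the numeric API level from the device under test.
    return dut.api_level

class Dut:
    def __init__(self, api_level):
        self.api_level = api_level

    @set_version(_api_level, 1, 27)
    def reboot(self):
        return "legacy reboot"

    @set_version(_api_level, 28, 99)
    def reboot(self):
        return "modern reboot"

Dut(api_level=30).reboot()  # dispatches to the 28-99 implementation
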
diff --git a/src/antlion/libs/yaml_writer.py b/src/antlion/libs/yaml_writer.py
index 8c710e1..33c349f 100644
--- a/src/antlion/libs/yaml_writer.py
+++ b/src/antlion/libs/yaml_writer.py
@@ -18,25 +18,27 @@
 import yaml
 
 # Allow yaml to dump OrderedDict
-yaml.add_representer(collections.OrderedDict,
-                     lambda dumper, data: dumper.represent_dict(data),
-                     Dumper=yaml.SafeDumper)
+yaml.add_representer(
+    collections.OrderedDict,
+    lambda dumper, data: dumper.represent_dict(data),
+    Dumper=yaml.SafeDumper,
+)
 
 
 def _str_representer(dumper, data):
     if len(data.splitlines()) > 1:
-        data = '\n'.join(line.replace('\t', '    ').rstrip()
-                         for line in data.splitlines())
-        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
-    return dumper.represent_scalar('tag:yaml.org,2002:str', data)
+        data = "\n".join(
+            line.replace("\t", "    ").rstrip() for line in data.splitlines()
+        )
+        return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
+    return dumper.represent_scalar("tag:yaml.org,2002:str", data)
 
 
 # Automatically convert multiline strings into block literals
 yaml.add_representer(str, _str_representer, Dumper=yaml.SafeDumper)
 
-
 _DUMP_KWARGS = dict(explicit_start=True, allow_unicode=True, indent=4)
-if yaml.__version__ >= '5.1':
+if yaml.__version__ >= "5.1":
     _DUMP_KWARGS.update(sort_keys=False)
 
 
diff --git a/src/antlion/logger.py b/src/antlion/logger.py
index 599e08b..1d18ad8 100755
--- a/src/antlion/logger.py
+++ b/src/antlion/logger.py
@@ -84,8 +84,8 @@
     def format(self, record):
         colored_record = copy(record)
         level_name = colored_record.levelname
-        style = LOG_LEVELS[level_name]['style']
-        formatted_level_name = '%s%s%s' % (style, level_name, Style.RESET)
+        style = LOG_LEVELS[level_name]["style"]
+        formatted_level_name = "%s%s%s" % (style, level_name, Style.RESET)
         colored_record.levelname = formatted_level_name
         return super().format(colored_record)
 
@@ -100,10 +100,10 @@
         An iterable of date and time elements in the order of month, day, hour,
         minute, second, microsecond.
     """
-    date, time = t.split(' ')
-    year, month, day = date.split('-')
-    h, m, s = time.split(':')
-    s, ms = s.split('.')
+    date, time = t.split(" ")
+    year, month, day = date.split("-")
+    h, m, s = time.split(":")
+    s, ms = s.split(".")
     return year, month, day, h, m, s, ms
 
 
@@ -201,19 +201,20 @@
     logging.log_path = log_path
     log_styles = [
         LogStyles.LOG_INFO + LogStyles.TO_STDOUT,
-        LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG
+        LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG,
     ]
     terminal_format = log_line_format
     if prefix:
         terminal_format = "[{}] {}".format(prefix, log_line_format)
-    stream_formatter = ColoredLogFormatter(terminal_format,
-                                           log_line_time_format)
+    stream_formatter = ColoredLogFormatter(terminal_format, log_line_time_format)
     file_formatter = logging.Formatter(log_line_format, log_line_time_format)
-    log = log_stream.create_logger('test_run',
-                                   '',
-                                   log_styles=log_styles,
-                                   stream_format=stream_formatter,
-                                   file_format=file_formatter)
+    log = log_stream.create_logger(
+        "test_run",
+        "",
+        log_styles=log_styles,
+        stream_format=stream_formatter,
+        file_format=file_formatter,
+    )
     log.setLevel(logging.DEBUG)
     _enable_additional_log_levels()
 
@@ -221,7 +222,7 @@
 def _enable_additional_log_levels():
     """Enables logging levels used for tracing tests and debugging devices."""
     for log_type, log_data in LOG_LEVELS.items():
-        logging.addLevelName(log_data['level'], log_type)
+        logging.addLevelName(log_data["level"], log_type)
 
 
 def kill_test_logger(logger):
@@ -248,8 +249,7 @@
     try:
         os.symlink(actual_path, link_path)
     except OSError:
-        logging.warning('Failed to create symlink to latest logs dir.',
-                        exc_info=True)
+        logging.warning("Failed to create symlink to latest logs dir.", exc_info=True)
 
 
 def setup_test_logger(log_path, prefix=None):
@@ -277,13 +277,14 @@
         A string representing the same time as input timestamp, but without
         special characters.
     """
-    norm_tp = log_line_timestamp.replace(' ', '_')
-    norm_tp = norm_tp.replace(':', '-')
+    norm_tp = log_line_timestamp.replace(" ", "_")
+    norm_tp = norm_tp.replace(":", "-")
     return norm_tp
 
 
 class LoggerAdapter(logging.LoggerAdapter):
     """A LoggerAdapter class that takes in a lambda for transforming logs."""
+
     def __init__(self, logging_lambda):
         self.logging_lambda = logging_lambda
         super(LoggerAdapter, self).__init__(logging.getLogger(), {})
@@ -302,7 +303,7 @@
     return tracelogger.TraceLogger(LoggerAdapter(logging_lambda))
 
 
-def create_tagged_trace_logger(tag=''):
+def create_tagged_trace_logger(tag=""):
     """Returns a logger that logs each line with the given prefix.
 
     Args:
@@ -311,7 +312,8 @@
 
             <TESTBED> <TIME> <LOG_LEVEL> [tag123] logged message
     """
+
     def logging_lambda(msg):
-        return '[%s] %s' % (tag, msg)
+        return "[%s] %s" % (tag, msg)
 
     return create_logger(logging_lambda)
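create_tagged_trace_logger is the per-component logging pattern used across the tree (the removed uicd_cli used it with tag='Uicd'); a minimal sketch with a hypothetical tag:

from antlion import logger

# Hypothetical tag; every line is prefixed with "[ap-1]" as described above.
log = logger.create_tagged_trace_logger(tag="ap-1")
log.info("associating client")
log.debug("beacon interval set to 100 TU")
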
diff --git a/src/antlion/net.py b/src/antlion/net.py
new file mode 100644
index 0000000..6f56703
--- /dev/null
+++ b/src/antlion/net.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+import time
+import socket
+
+from typing import Optional
+
+
+def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
+    """Wait for the host to start accepting connections on the port.
+
+    Some services take some time to start. Call this after launching the service
+    to avoid race conditions.
+
+    Args:
+        host: IP of the running service.
+        port: Port of the running service.
+        timeout_sec: Seconds to wait until raising TimeoutError
+
+    Raises:
+        TimeoutError: when timeout_sec has expired without a successful
+            connection to the service
+    """
+    last_error: Optional[OSError] = None
+    timeout = time.perf_counter() + timeout_sec
+
+    while True:
+        try:
+            time_left = max(timeout - time.perf_counter(), 0)
+            with socket.create_connection((host, port), timeout=time_left):
+                return
+        except ConnectionRefusedError as e:
+            # Occurs when the host is online but not ready to accept connections
+            # yet; wait to see if the host becomes ready.
+            last_error = e
+        except socket.timeout as e:
+            # socket.timeout was aliased to TimeoutError in Python 3.10.
+            last_error = e
+        except OSError as e:
+            if e.errno == errno.EHOSTUNREACH:
+                # No route to host. Occurs when the interface to the host is
+                # torn down; wait to see if the interface comes back.
+                last_error = e
+            else:
+                # Unexpected error
+                raise e
+
+        if time.perf_counter() >= timeout:
+            raise TimeoutError(
+                f"Waited over {timeout_sec}s for the service to start "
+                f"accepting connections at {host}:{port}"
+            ) from last_error
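A short sketch of the intended call pattern for the new wait_for_port helper; the service being launched here is hypothetical:

import subprocess

from antlion.net import wait_for_port

# Hypothetical example: start a local HTTP server, then block until its port
# accepts connections (TimeoutError is raised after 10 seconds otherwise).
proc = subprocess.Popen(["python3", "-m", "http.server", "8000"])
try:
    wait_for_port("127.0.0.1", 8000, timeout_sec=10)
    # ... interact with the service here ...
finally:
    proc.terminate()
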
diff --git a/src/antlion/records.py b/src/antlion/records.py
index aee2385..1c7ad23 100644
--- a/src/antlion/records.py
+++ b/src/antlion/records.py
@@ -44,15 +44,15 @@
         See MoblyTestSummaryWriter.dump for documentation.
         """
         new_content = collections.OrderedDict(copy.deepcopy(content))
-        new_content['Type'] = entry_type.value
-        new_content.move_to_end('Type', last=False)
+        new_content["Type"] = entry_type.value
+        new_content.move_to_end("Type", last=False)
         # Both user code and Mobly code can trigger this dump, hence the lock.
         with self._lock:
             # For Python3, setting the encoding on yaml.safe_dump does not work
             # because Python3 file descriptors set an encoding by default, which
             # PyYAML uses instead of the encoding on yaml.safe_dump. So, the
             # encoding has to be set on the open call instead.
-            with io.open(self._path, 'a', encoding='utf-8') as f:
+            with io.open(self._path, "a", encoding="utf-8") as f:
                 # Use safe_dump here to avoid language-specific tags in final
                 # output.
                 yaml_writer.safe_dump(new_content, f)
@@ -95,8 +95,7 @@
         Sets the begin_time of this record.
         """
         super().test_begin()
-        self.log_begin_time = logger.epoch_to_log_line_timestamp(
-            self.begin_time)
+        self.log_begin_time = logger.epoch_to_log_line_timestamp(self.begin_time)
 
     def _test_end(self, result, e):
         """Class internal function to signal the end of a test case execution.
@@ -109,8 +108,7 @@
         """
         super()._test_end(result, e)
         if self.end_time:
-            self.log_end_time = logger.epoch_to_log_line_timestamp(
-                self.end_time)
+            self.log_end_time = logger.epoch_to_log_line_timestamp(self.end_time)
 
     def to_dict(self):
         """Gets a dictionary representing the content of this class.
@@ -130,8 +128,7 @@
         d[TestResultEnums.RECORD_EXTRAS] = self.extras
         d[TestResultEnums.RECORD_DETAILS] = self.details
         d[TestResultEnums.RECORD_EXTRA_ERRORS] = {
-            key: value.to_dict()
-            for (key, value) in self.extra_errors.items()
+            key: value.to_dict() for (key, value) in self.extra_errors.items()
         }
         d[TestResultEnums.RECORD_STACKTRACE] = self.stacktrace
         return d
@@ -181,8 +178,7 @@
             A TestResult instance that's the sum of two TestResult instances.
         """
         if not isinstance(r, MoblyTestResult):
-            raise TypeError("Operand %s of type %s is not a TestResult." %
-                            (r, type(r)))
+            raise TypeError("Operand %s of type %s is not a TestResult." % (r, type(r)))
         sum_result = TestResult()
         for name in sum_result.__dict__:
             r_value = getattr(r, name)
@@ -208,8 +204,10 @@
             A json-format string representing the test results.
         """
         d = collections.OrderedDict()
-        d["ControllerInfo"] = {record.controller_name: record.controller_info
-                               for record in self.controller_info}
+        d["ControllerInfo"] = {
+            record.controller_name: record.controller_info
+            for record in self.controller_info
+        }
         d["Results"] = [record.to_dict() for record in self.executed]
         d["Summary"] = self.summary_dict()
         d["Error"] = self.errors_list()
@@ -236,12 +234,13 @@
         l = list()
         for record in self.error:
             if isinstance(record, TestResultRecord):
-                keys = [TestResultEnums.RECORD_NAME,
-                        TestResultEnums.RECORD_DETAILS,
-                        TestResultEnums.RECORD_EXTRA_ERRORS]
+                keys = [
+                    TestResultEnums.RECORD_NAME,
+                    TestResultEnums.RECORD_DETAILS,
+                    TestResultEnums.RECORD_EXTRA_ERRORS,
+                ]
             elif isinstance(record, ExceptionRecord):
-                keys = [TestResultEnums.RECORD_DETAILS,
-                        TestResultEnums.RECORD_POSITION]
+                keys = [TestResultEnums.RECORD_DETAILS, TestResultEnums.RECORD_POSITION]
             else:
                 return []
             l.append({k: record.to_dict()[k] for k in keys})
diff --git a/src/antlion/test_decorators.py b/src/antlion/test_decorators.py
index 2ec1835..a152f4f 100644
--- a/src/antlion/test_decorators.py
+++ b/src/antlion/test_decorators.py
@@ -21,8 +21,7 @@
     return test_signals[-1]
 
 
-def repeated_test(num_passes, acceptable_failures=0,
-                  result_selector=__select_last):
+def repeated_test(num_passes, acceptable_failures=0, result_selector=__select_last):
     """A decorator that runs a test case multiple times.
 
     This decorator can be used to run a test multiple times and aggregate the
@@ -49,8 +48,9 @@
             returns the test signal to report the test case as. Note that the
             list also contains any uncaught exceptions from the test execution.
     """
+
     def decorator(func):
-        if not func.__name__.startswith('test_'):
+        if not func.__name__.startswith("test_"):
             raise ValueError('Tests must start with "test_".')
 
         def test_wrapper(self):
@@ -60,8 +60,11 @@
             for i in range(num_passes + acceptable_failures):
                 try:
                     func(self, i + 1)
-                except (signals.TestFailure, signals.TestError,
-                        AssertionError) as signal:
+                except (
+                    signals.TestFailure,
+                    signals.TestError,
+                    AssertionError,
+                ) as signal:
                     test_signals_received.append(signal)
                     num_failures += 1
                 except signals.TestPass as signal:
@@ -74,9 +77,12 @@
                     num_failures += 1
                 else:
                     num_seen_passes += 1
-                    test_signals_received.append(signals.TestPass(
-                        'Test iteration %s of %s passed without details.' % (
-                        i, func.__name__)))
+                    test_signals_received.append(
+                        signals.TestPass(
+                            "Test iteration %s of %s passed without details."
+                            % (i, func.__name__)
+                        )
+                    )
 
                 if num_failures > acceptable_failures:
                     break
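For reference, repeated_test passes the 1-based attempt number to the wrapped test; a minimal sketch of its use inside a test class (the class body and link check are illustrative):

from antlion import base_test
from antlion.test_decorators import repeated_test

class FlakyLinkTest(base_test.BaseTestClass):
    # Pass 3 iterations, tolerating up to 2 failures before giving up.
    @repeated_test(num_passes=3, acceptable_failures=2)
    def test_link_comes_up(self, attempt):
        self.log.info("link check, attempt %d", attempt)
        assert self.check_link_up()  # hypothetical helper on this test class
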
diff --git a/src/antlion/test_runner.py b/src/antlion/test_runner.py
index 261d0bd..bcb516f 100644
--- a/src/antlion/test_runner.py
+++ b/src/antlion/test_runner.py
@@ -44,14 +44,16 @@
         The test class in the test module.
     """
     test_classes = []
-    main_module_members = sys.modules['__main__']
+    main_module_members = sys.modules["__main__"]
     for _, module_member in main_module_members.__dict__.items():
         if inspect.isclass(module_member):
             if issubclass(module_member, base_test.BaseTestClass):
                 test_classes.append(module_member)
     if len(test_classes) != 1:
-        logging.error('Expected 1 test class per file, found %s.',
-                      [t.__name__ for t in test_classes])
+        logging.error(
+            "Expected 1 test class per file, found %s.",
+            [t.__name__ for t in test_classes],
+        )
         sys.exit(1)
     return test_classes[0]
 
@@ -82,7 +84,7 @@
     except signals.TestAbortAll:
         raise
     except:
-        logging.exception('Exception when executing %s.', tr.testbed_name)
+        logging.exception("Exception when executing %s.", tr.testbed_name)
     finally:
         tr.stop()
 
@@ -107,14 +109,15 @@
         self.test_run_config = test_configs
         self.testbed_name = self.test_run_config.testbed_name
         start_time = logger.get_log_file_timestamp()
-        self.id = '{}@{}'.format(self.testbed_name, start_time)
+        self.id = "{}@{}".format(self.testbed_name, start_time)
         self.test_run_config.log_path = os.path.abspath(
-            os.path.join(self.test_run_config.log_path, self.testbed_name,
-                         start_time))
+            os.path.join(self.test_run_config.log_path, self.testbed_name, start_time)
+        )
         logger.setup_test_logger(self.log_path, self.testbed_name)
         self.log = logging.getLogger()
         self.test_run_config.summary_writer = records.TestSummaryWriter(
-            os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY))
+            os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY)
+        )
         self.run_list = run_list
         self.dump_config()
         self.results = records.TestResult()
@@ -147,8 +150,8 @@
         """
 
         def is_testfile_name(name, ext):
-            if ext == '.py':
-                if name.endswith('Test') or name.endswith('_test'):
+            if ext == ".py":
+                if name.endswith("Test") or name.endswith("_test"):
                     return True
             return False
 
@@ -157,20 +160,20 @@
         for path, name, _ in file_list:
             sys.path.append(path)
             try:
-                with utils.SuppressLogOutput(
-                        log_levels=[logging.INFO, logging.ERROR]):
+                with utils.SuppressLogOutput(log_levels=[logging.INFO, logging.ERROR]):
                     module = importlib.import_module(name)
             except Exception as e:
-                logging.debug('Failed to import %s: %s', path, str(e))
+                logging.debug("Failed to import %s: %s", path, str(e))
                 for test_cls_name, _ in self.run_list:
-                    alt_name = name.replace('_', '').lower()
+                    alt_name = name.replace("_", "").lower()
                     alt_cls_name = test_cls_name.lower()
                     # Only block if a test class on the run list causes an
                     # import error. We need to check against both naming
                     # conventions: AaaBbb and aaa_bbb.
                     if name == test_cls_name or alt_name == alt_cls_name:
-                        msg = ('Encountered error importing test class %s, '
-                               'abort.') % test_cls_name
+                        msg = (
+                            "Encountered error importing test class %s, " "abort."
+                        ) % test_cls_name
                         # This exception is logged here to help with debugging
                         # under py2, because "raise X from Y" syntax is only
                         # supported under py3.
@@ -178,8 +181,8 @@
                         raise ValueError(msg)
                 continue
             for member_name in dir(module):
-                if not member_name.startswith('__'):
-                    if member_name.endswith('Test'):
+                if not member_name.startswith("__"):
+                    if member_name.endswith("Test"):
                         test_class = getattr(module, member_name)
                         if inspect.isclass(test_class):
                             test_classes[member_name] = test_class
@@ -203,15 +206,17 @@
         matches = fnmatch.filter(self.test_classes.keys(), test_cls_name)
         if not matches:
             self.log.info(
-                'Cannot find test class %s or classes matching pattern, '
-                'skipping for now.' % test_cls_name)
-            record = records.TestResultRecord('*all*', test_cls_name)
-            record.test_skip(signals.TestSkip('Test class does not exist.'))
+                "Cannot find test class %s or classes matching pattern, "
+                "skipping for now." % test_cls_name
+            )
+            record = records.TestResultRecord("*all*", test_cls_name)
+            record.test_skip(signals.TestSkip("Test class does not exist."))
             self.results.add_record(record)
             return
         if matches != [test_cls_name]:
-            self.log.info('Found classes matching pattern %s: %s',
-                          test_cls_name, matches)
+            self.log.info(
+                "Found classes matching pattern %s: %s", test_cls_name, matches
+            )
 
         for test_cls_name_match in matches:
             test_cls = self.test_classes[test_cls_name_match]
@@ -245,27 +250,30 @@
             self.test_classes = {test_class.__name__: test_class}
         else:
             t_paths = self.test_run_config.controller_configs[
-                keys.Config.key_test_paths.value]
+                keys.Config.key_test_paths.value
+            ]
             self.test_classes = self.import_test_modules(t_paths)
-        self.log.debug('Executing run list %s.', self.run_list)
+        self.log.debug("Executing run list %s.", self.run_list)
         for test_cls_name, test_case_names in self.run_list:
             if not self.running:
                 break
 
             if test_case_names:
-                self.log.debug('Executing test cases %s in test class %s.',
-                               test_case_names, test_cls_name)
+                self.log.debug(
+                    "Executing test cases %s in test class %s.",
+                    test_case_names,
+                    test_cls_name,
+                )
             else:
-                self.log.debug('Executing test class %s', test_cls_name)
+                self.log.debug("Executing test class %s", test_cls_name)
 
             try:
                 self.run_test_class(test_cls_name, test_case_names)
             except error.ActsError as e:
                 self.results.error.append(ExceptionRecord(e))
-                self.log.error('Test Runner Error: %s' % e.details)
+                self.log.error("Test Runner Error: %s" % e.details)
             except signals.TestAbortAll as e:
-                self.log.warning(
-                    'Abort all subsequent test classes. Reason: %s', e)
+                self.log.warning("Abort all subsequent test classes. Reason: %s", e)
                 raise
 
     def stop(self):
@@ -275,8 +283,10 @@
         This function concludes a test run and writes out a test report.
         """
         if self.running:
-            msg = '\nSummary for test run %s: %s\n' % (
-                self.id, self.results.summary_str())
+            msg = "\nSummary for test run %s: %s\n" % (
+                self.id,
+                self.results.summary_str(),
+            )
             self._write_results_to_file()
             self.log.info(msg.strip())
             logger.kill_test_logger(self.log)
@@ -285,29 +295,34 @@
     def _write_results_to_file(self):
         """Writes test results to file(s) in a serializable format."""
         # Old JSON format
-        path = os.path.join(self.log_path, 'test_run_summary.json')
-        with open(path, 'w') as f:
+        path = os.path.join(self.log_path, "test_run_summary.json")
+        with open(path, "w") as f:
             f.write(self.results.json_str())
         # New YAML format
-        self.summary_writer.dump(self.results.summary_dict(),
-                                 records.TestSummaryEntryType.SUMMARY)
+        self.summary_writer.dump(
+            self.results.summary_dict(), records.TestSummaryEntryType.SUMMARY
+        )
 
     def dump_config(self):
         """Writes the test config to a JSON file under self.log_path"""
-        config_path = os.path.join(self.log_path, 'test_configs.json')
-        with open(config_path, 'a') as f:
-            json.dump(dict(
-                itertools.chain(
-                    self.test_run_config.user_params.items(),
-                    self.test_run_config.controller_configs.items())),
-                      f,
-                      skipkeys=True,
-                      indent=4)
+        config_path = os.path.join(self.log_path, "test_configs.json")
+        with open(config_path, "a") as f:
+            json.dump(
+                dict(
+                    itertools.chain(
+                        self.test_run_config.user_params.items(),
+                        self.test_run_config.controller_configs.items(),
+                    )
+                ),
+                f,
+                skipkeys=True,
+                indent=4,
+            )
 
     def write_test_campaign(self):
         """Log test campaign file."""
-        path = os.path.join(self.log_path, 'test_campaign.log')
-        with open(path, 'w') as f:
+        path = os.path.join(self.log_path, "test_campaign.log")
+        with open(path, "w") as f:
             for test_class, test_cases in self.run_list:
-                f.write('%s:\n%s' % (test_class, ',\n'.join(test_cases)))
-                f.write('\n\n')
+                f.write("%s:\n%s" % (test_class, ",\n".join(test_cases)))
+                f.write("\n\n")
diff --git a/src/antlion/test_utils/abstract_devices/bluetooth_device.py b/src/antlion/test_utils/abstract_devices/bluetooth_device.py
deleted file mode 100644
index c0cb29e..0000000
--- a/src/antlion/test_utils/abstract_devices/bluetooth_device.py
+++ /dev/null
@@ -1,1468 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import logging
-
-from queue import Empty
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-import antlion.test_utils.bt.bt_test_utils as bt_test_utils
-
-
-def create_bluetooth_device(hardware_device):
-    """Creates a generic Bluetooth device based on type of device that is sent
-    to the functions.
-
-    Args:
-        hardware_device: A Bluetooth hardware device that is supported by ACTS.
-    """
-    if isinstance(hardware_device, FuchsiaDevice):
-        return FuchsiaBluetoothDevice(hardware_device)
-    elif isinstance(hardware_device, AndroidDevice):
-        return AndroidBluetoothDevice(hardware_device)
-    else:
-        raise ValueError('Unable to create BluetoothDevice for type %s' %
-                         type(hardware_device))
-
-
-class BluetoothDevice(object):
-    """Class representing a generic Bluetooth device.
-
-    Each object of this class represents a generic Bluetooth device.
-    Android and Fuchsia devices are currently supported.
-
-    Attributes:
-        device: A generic Bluetooth device.
-    """
-
-    def __init__(self, device):
-        self.device = device
-        self.log = logging
-
-    def a2dp_initiate_open_stream(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_profile_a2dp_sink(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_pairing_helper(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def set_discoverable(self, is_discoverable):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def bluetooth_toggle_state(self, state):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_discover_characteristic_by_uuid(self, peer_identifier,
-                                                    uuid):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def initialize_bluetooth_controller(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_pairing_pin(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def input_pairing_pin(self, pin):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_bluetooth_local_address(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_characteristic_by_uuid(self, peer_identifier, uuid):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_long_read_descriptor_by_handle(self, peer_identifier,
-                                                   handle, offset, max_bytes):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_disconnect(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_refresh(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def log_info(self, log):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def reset_bluetooth(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_add_service(self, sdp_record):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_clean_up(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_init(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_remove_service(self, service_id):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_le_advertisement(self, adv_data, scan_response, adv_interval,
-                               connectable):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_le_advertisement(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def set_bluetooth_local_name(self, name):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def setup_gatt_server(self, database):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def close_gatt_server(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def unbond_device(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def unbond_all_known_devices(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
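-# Illustrative usage sketch (a hypothetical flow, assuming an already
-# configured android_device controller): tests typically wrap a concrete
-# controller in one of the subclasses below and drive it through this
-# generic interface, for example:
-#
-#     bt = AndroidBluetoothDevice(android_device)
-#     bt.bluetooth_toggle_state(True)
-#     bt.set_discoverable(True)
-#
-# Any method a subclass does not override falls through to the stubs above
-# and raises NotImplementedError.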
-
-class AndroidBluetoothDevice(BluetoothDevice):
-    """Class wrapper for an Android Bluetooth device.
-
-    Each object of this class wraps an Android device and exposes the generic
-    BluetoothDevice interface on top of it.
-
-    Attributes:
-        android_device: An Android Bluetooth device.
-    """
-
-    def __init__(self, android_device):
-        super().__init__(android_device)
-        self.gatt_timeout = 10
-        self.peer_mapping = {}
-        self.discovered_services_index = None
-
-    def _client_wait(self, gatt_event, gatt_callback):
-        return self._timed_pop(gatt_event, gatt_callback)
-
-    def _timed_pop(self, gatt_event, gatt_callback):
-        expected_event = gatt_event["evt"].format(gatt_callback)
-        try:
-            return self.device.ed.pop_event(expected_event, self.gatt_timeout)
-        except Empty as emp:
-            raise AssertionError(gatt_event["err"].format(expected_event))
-
-    def _setup_discovered_services_index(self, bluetooth_gatt, gatt_callback):
-        """ Sets the discovered services index for the GATT connection
-        related to the Bluetooth GATT callback object.
-
-        Args:
-            bluetooth_gatt: The BluetoothGatt callback id
-            gatt_callback: The GATT callback id for the connection
-        """
-        if not self.discovered_services_index:
-            self.device.droid.gattClientDiscoverServices(bluetooth_gatt)
-            expected_event = gatt_cb_strings['gatt_serv_disc'].format(
-                gatt_callback)
-            event = self.device.ed.pop_event(expected_event, self.gatt_timeout)
-            self.discovered_services_index = event['data']['ServicesIndex']
-
-    def a2dp_initiate_open_stream(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_profile_a2dp_sink(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def bluetooth_toggle_state(self, state):
-        self.device.droid.bluetoothToggleState(state)
-
-    def set_discoverable(self, is_discoverable):
-        """ Sets the device's discoverability.
-
-        Args:
-            is_discoverable: True if discoverable, false if not discoverable
-        """
-        if is_discoverable:
-            self.device.droid.bluetoothMakeDiscoverable()
-        else:
-            self.device.droid.bluetoothMakeUndiscoverable()
-
-    def initialize_bluetooth_controller(self):
-        """ Just pass for Android as there is no concept of initializing
-        a Bluetooth controller.
-        """
-
-    def start_pairing_helper(self):
-        """ Starts the Android pairing helper.
-        """
-        self.device.droid.bluetoothStartPairingHelper(True)
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """ Perform a GATT Client write Characteristic without response to
-        remote peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle (or instance id).
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer identifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index(peer_info.get('bluetooth_gatt'),
-                                              peer_info.get('gatt_callback'))
-        self.device.droid.gattClientWriteCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle, value)
-        try:
-            self._client_wait(gatt_event['char_write'],
-                              peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to write Characteristic: {}".format(err))
-            return False
-        return True
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """ Perform a GATT Client write Characteristic to the remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle (or instance id).
-            offset: Not used yet.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer identifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index(peer_info.get('bluetooth_gatt'),
-                                              peer_info.get('gatt_callback'))
-        self.device.droid.gattClientWriteCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle, value)
-        try:
-            self._client_wait(gatt_event['char_write'],
-                              peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to write Characteristic: {}".format(err))
-            return False
-        return True
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle (or instance id).
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer identifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return None
-        self._setup_discovered_services_index(peer_info.get('bluetooth_gatt'),
-                                              peer_info.get('gatt_callback'))
-        self.device.droid.gattClientReadCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle)
-        try:
-            event = self._client_wait(gatt_event['char_read'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to read Characteristic: {}".format(err))
-            return None
-
-        return event['data']['CharacteristicValue']
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            offset: Not used yet.
-            handle: The characteristic handle (or instance id).
-            max_bytes: Not used yet.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer identifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return None
-        self._setup_discovered_services_index(peer_info.get('bluetooth_gatt'),
-                                              peer_info.get('gatt_callback'))
-        self.device.droid.gattClientReadCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle)
-        try:
-            event = self._client_wait(gatt_event['char_read'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to read Characteristic: {}".format(err))
-            return None
-
-        return event['data']['CharacteristicValue']
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client enable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle.
-        Returns:
-            True if success, False if failure.
-        """
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client disable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle.
-        Returns:
-            True if success, False if failure.
-        """
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """ Perform a GATT Client read Descriptor to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The Descriptor handle (or instance id).
-        Returns:
-            Value of Descriptor if success, None if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer identifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return None
-        self._setup_discovered_services_index(peer_info.get('bluetooth_gatt'),
-                                              peer_info.get('gatt_callback'))
-        self.device.droid.gattClientReadDescriptorByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle)
-        try:
-            event = self._client_wait(gatt_event['desc_read'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to read Descriptor: {}".format(err))
-        # TODO: Implement sending Descriptor value in SL4A such that the data
-        # can be represented by: event['data']['DescriptorValue']
-        return ""
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """ Perform a GATT Client write Descriptor to the remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The Descriptor handle (or instance id).
-            offset: Not used yet
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer identifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index(peer_info.get('bluetooth_gatt'),
-                                              peer_info.get('gatt_callback'))
-        self.device.droid.gattClientWriteDescriptorByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle, value)
-        try:
-            self._client_wait(gatt_event['desc_write'],
-                              peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to write Descriptor: {}".format(err))
-            return False
-        return True
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect=False):
-        """ Perform a GATT connection to a perihperal.
-
-        Args:
-            peer_identifier: The mac address to connect to.
-            transport: Which transport to use.
-            autoconnect: Set autocnnect to True or False.
-        Returns:
-            True if success, False if failure.
-        """
-        try:
-            bluetooth_gatt, gatt_callback = setup_gatt_connection(
-                self.device, peer_identifier, autoconnect, transport)
-            self.peer_mapping[peer_identifier] = {
-                "bluetooth_gatt": bluetooth_gatt,
-                "gatt_callback": gatt_callback
-            }
-        except GattTestUtilsError as err:
-            self.log.error(err)
-            return False
-        return True
-
-    def gatt_disconnect(self, peer_identifier):
-        """ Perform a GATT disconnect from a perihperal.
-
-        Args:
-            peer_identifier: The peer to disconnect from.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "No previous connections made to {}".format(peer_identifier))
-            return False
-
-        try:
-            disconnect_gatt_connection(self.device,
-                                       peer_info.get("bluetooth_gatt"),
-                                       peer_info.get("gatt_callback"))
-            self.device.droid.gattClientClose(peer_info.get("bluetooth_gatt"))
-        except GattTestUtilsError as err:
-            self.log.error(err)
-            return False
-        return True
-
-    def gatt_client_refresh(self, peer_identifier):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "No previous connections made to {}".format(peer_identifier))
-            return False
-        self.device.droid.gattClientRefresh(peer_info["bluetooth_gatt"])
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """ Scan over LE for a specific device name.
-
-         Args:
-            name: The name filter to set.
-            timeout: The timeout to wait to find the advertisement.
-        Returns:
-            Discovered mac address or None
-        """
-        self.device.droid.bleSetScanSettingsScanMode(
-            ble_scan_settings_modes['low_latency'])
-        filter_list = self.device.droid.bleGenFilterList()
-        scan_settings = self.device.droid.bleBuildScanSetting()
-        scan_callback = self.device.droid.bleGenScanCallback()
-        self.device.droid.bleSetScanFilterDeviceName(name)
-        self.device.droid.bleBuildScanFilter(filter_list)
-        self.device.droid.bleStartBleScan(filter_list, scan_settings,
-                                          scan_callback)
-        try:
-            event = self.device.ed.pop_event(scan_result.format(scan_callback),
-                                             timeout)
-            return event['data']['Result']['deviceInfo']['address']
-        except Empty as err:
-            self.log.info("Scanner did not find advertisement {}".format(err))
-            return None
-
-    def log_info(self, log):
-        """ Log directly onto the device.
-
-        Args:
-            log: The informative log.
-        """
-        self.device.droid.log.logI(log)
-
-    def set_bluetooth_local_name(self, name):
-        """ Sets the Bluetooth controller's local name
-        Args:
-            name: The name to set.
-        """
-        self.device.droid.bluetoothSetLocalName(name)
-
-    def get_local_bluetooth_address(self):
-        """ Returns the Bluetooth local address.
-        """
-        return self.device.droid.bluetoothGetLocalAddress()
-
-    def reset_bluetooth(self):
-        """ Resets Bluetooth on the Android Device.
-        """
-        bt_test_utils.reset_bluetooth([self.device])
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Adds an SDP search record.
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_add_service(self, sdp_record):
-        """Adds an SDP service record.
-        Args:
-            sdp_record: The dictionary representing the search record to add.
-        Returns:
-            service_id: The service id to track the service record published.
-                None if failed.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_clean_up(self):
-        """Cleans up all objects related to SDP.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_init(self):
-        """Initializes SDP on the device.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_remove_service(self, service_id):
-        """Removes a service based on an input id.
-        Args:
-            service_id: The service ID to remove.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def unbond_all_known_devices(self):
-        """ Unbond all known remote devices.
-        """
-        self.device.droid.bluetoothFactoryReset()
-
-    def unbond_device(self, peer_identifier):
-        """ Unbond peer identifier.
-
-        Args:
-            peer_identifier: The mac address for the peer to unbond.
-
-        """
-        self.device.droid.bluetoothUnbond(peer_identifier)
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """ Send an outgoing pairing request the input peer_identifier.
-
-        Android currently does not support setting various security levels or
-        bondable modes. Making them available for other bluetooth_device
-        variants. Depending on the Address type, Android will figure out the
-        transport to pair automatically.
-
-        Args:
-            peer_identifier: A string representing the device id.
-            security_level: Not yet implemented. See Fuchsia device impl.
-            non_bondable: Not yet implemented. See Fuchsia device impl.
-            transport: Not yet implemented. See Fuchsia device impl.
-
-        """
-        self.device.droid.bluetoothBond(peer_identifier)
-
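-# Illustrative GATT flow for the Android wrapper (a sketch only; mac_addr,
-# transport, handle, and android_device are placeholders, not values from
-# this module):
-#
-#     bt = AndroidBluetoothDevice(android_device)
-#     if bt.gatt_connect(mac_addr, transport, autoconnect=False):
-#         bt.gatt_client_write_characteristic_by_handle(
-#             mac_addr, handle, offset=0, value=[0x01])
-#         bt.gatt_disconnect(mac_addr)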
-
-class FuchsiaBluetoothDevice(BluetoothDevice):
-    """Class wrapper for a Fuchsia Bluetooth device.
-
-    Each object of this class wraps a Fuchsia device and exposes the generic
-    BluetoothDevice interface on top of it.
-
-    Attributes:
-        fuchsia_device: A Fuchsia Bluetooth device.
-    """
-
-    def __init__(self, fuchsia_device):
-        super().__init__(fuchsia_device)
-
-    def a2dp_initiate_open_stream(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        """ Starts the A2DP sink profile.
-        """
-        self.device.start_v1_component("bt-a2dp-sink")
-
-    def stop_profile_a2dp_sink(self):
-        """ Stops the A2DP sink profile.
-        """
-        self.device.stop_v1_component("bt-a2dp-sink")
-
-    def start_pairing_helper(self):
-        self.device.sl4f.bts_lib.acceptPairing()
-
-    def bluetooth_toggle_state(self, state):
-        """Stub for Fuchsia implementation."""
-
-    def set_discoverable(self, is_discoverable):
-        """ Sets the device's discoverability.
-
-        Args:
-            is_discoverable: True if discoverable, false if not discoverable
-        """
-        self.device.sl4f.bts_lib.setDiscoverable(is_discoverable)
-
-    def get_pairing_pin(self):
-        """ Get the pairing pin from the active pairing delegate.
-        """
-        return self.device.sl4f.bts_lib.getPairingPin()['result']
-
-    def input_pairing_pin(self, pin):
-        """ Input pairing pin to active pairing delegate.
-
-        Args:
-            pin: The pin to input.
-        """
-        self.device.sl4f.bts_lib.inputPairingPin(pin)
-
-    def initialize_bluetooth_controller(self):
-        """ Initialize Bluetooth controller for first time use.
-        """
-        self.device.sl4f.bts_lib.initBluetoothSys()
-
-    def get_local_bluetooth_address(self):
-        """ Returns the Bluetooth local address.
-        """
-        return self.device.sl4f.bts_lib.getActiveAdapterAddress().get("result")
-
-    def set_bluetooth_local_name(self, name):
-        """ Sets the Bluetooth controller's local name
-        Args:
-            name: The name to set.
-        """
-        self.device.sl4f.bts_lib.setName(name)
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """ Perform a GATT Client write Characteristic without response to
-        remote peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeCharByIdWithoutResponse(
-            handle, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """ Perform a GATT Client write Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeCharById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_long_characteristic_by_handle(
-            self, peer_identifier, handle, offset, value, reliable_mode=False):
-        """ Perform a GATT Client write long Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-            reliable_mode: A bool value representing a reliable write or not.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.error(
-                "Unable to find handle {} in GATT server db.".format(handle))
-            return False
-        result = self.device.sl4f.gattc_lib.writeLongCharById(
-            handle, offset, value, reliable_mode)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write long characteristic handle {} with err: {}".
-                format(handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_long_descriptor_by_handle(self, peer_identifier,
-                                                    handle, offset, value):
-        """ Perform a GATT Client write long Descriptor to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The descriptor handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.error(
-                "Unable to find handle {} in GATT server db.".format(handle))
-            return False
-        result = self.device.sl4f.gattc_lib.writeLongDescById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write long descriptor handle {} with err: {}".
-                format(handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readCharacteristicById(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_characteristic_by_uuid(self, peer_identifier, uuid):
-        """ Perform a GATT Client read Characteristic by uuid to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            uuid: The characteristic uuid.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, uuid, uuid=True)):
-            self.log.warn(
-                "Unable to find uuid {} in GATT server db.".format(uuid))
-        result = self.device.sl4f.gattc_lib.readCharacteristicByType(uuid)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic uuid {} with err: {}".format(
-                    uuid, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start reading.
-            max_bytes: The max bytes to return for each read.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readLongCharacteristicById(
-            handle, offset, max_bytes)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client enable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.enableNotifyCharacteristic(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to enable characteristic notifications for handle {} "
-                "with err: {}".format(handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client disable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.disableNotifyCharacteristic(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to disable characteristic notifications for handle {} "
-                "with err: {}".format(peer_identifier, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """ Perform a GATT Client read Descriptor to remote peer GATT server
-        database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The Descriptor handle.
-        Returns:
-            Value of Descriptor if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readDescriptorById(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read descriptor for handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """ Perform a GATT Client write Descriptor to remote peer GATT server
-        database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The Descriptor handle.
-            offset: The offset to start writing at.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeDescriptorById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write descriptor for handle {} with err: {}".format(
-                    peer_identifier, result.get("error")))
-            return None
-        return True
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect):
-        """ Perform a GATT connection to a perihperal.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            transport: Not implemented.
-            autoconnect: Not implemented.
-        Returns:
-            True if success, False if failure.
-        """
-        connection_result = self.device.sl4f.gattc_lib.bleConnectToPeripheral(
-            peer_identifier)
-        if connection_result.get("error") is not None:
-            self.log.error("Failed to connect to peer id {}: {}".format(
-                peer_identifier, connection_result.get("error")))
-            return False
-        return True
-
-    def gatt_client_refresh(self, peer_identifier):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device. In Fuchsia there is no FIDL api to automatically do this
-        yet. Therefore just read all Characteristics which satisfies the same
-        requirements.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        self._read_all_characteristics(peer_identifier)
-
-    def gatt_client_discover_characteristic_by_uuid(self, peer_identifier,
-                                                    uuid):
-        """ Discover a Characteristic on a peripheral by UUID.
-
-        In Fuchsia there is no FIDL api dedicated to this yet. Therefore every
-        service is walked and any Characteristic matching the UUID is read,
-        which satisfies the same requirements.
-
-        Args:
-            peer_identifier: The peer to query.
-            uuid: The Characteristic UUID to look for.
-        """
-        self._read_all_characteristics(peer_identifier, uuid)
-
-    def gatt_disconnect(self, peer_identifier):
-        """ Perform a GATT disconnect from a perihperal.
-
-        Args:
-            peer_identifier: The peer to disconnect from.
-        Returns:
-            True if success, False if failure.
-        """
-        disconnect_result = self.device.sl4f.gattc_lib.bleDisconnectPeripheral(
-            peer_identifier)
-        if disconnect_result.get("error") is not None:
-            self.log.error("Failed to disconnect from peer id {}: {}".format(
-                peer_identifier, disconnect_result.get("error")))
-            return False
-        return True
-
-    def reset_bluetooth(self):
-        """Stub for Fuchsia implementation."""
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Adds an SDP search record.
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        """
-        return self.device.sl4f.sdp_lib.addSearch(attribute_list, profile_id)
-
-    def sdp_add_service(self, sdp_record):
-        """Adds an SDP service record.
-        Args:
-            sdp_record: The dictionary representing the search record to add.
-        """
-        return self.device.sl4f.sdp_lib.addService(sdp_record)
-
-    def sdp_clean_up(self):
-        """Cleans up all objects related to SDP.
-        """
-        return self.device.sl4f.sdp_lib.cleanUp()
-
-    def sdp_init(self):
-        """Initializes SDP on the device.
-        """
-        return self.device.sl4f.sdp_lib.init()
-
-    def sdp_remove_service(self, service_id):
-        """Removes a service based on an input id.
-        Args:
-            service_id: The service ID to remove.
-        """
-        return self.device.sl4f.sdp_lib.removeService(service_id)
-
-    def start_le_advertisement(self, adv_data, scan_response, adv_interval,
-                               connectable):
-        """ Starts an LE advertisement
-
-        Args:
-            adv_data: Advertisement data.
-            scan_response: Scan response data.
-            adv_interval: Advertisement interval.
-            connectable: Whether the advertisement is connectable.
-        """
-        self.device.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, adv_interval, connectable)
-
-    def stop_le_advertisement(self):
-        """ Stop active LE advertisement.
-        """
-        self.device.sl4f.ble_lib.bleStopBleAdvertising()
-
-    def setup_gatt_server(self, database):
-        """ Sets up an input GATT server.
-
-        Args:
-            database: A dictionary representing the GATT database to setup.
-        """
-        self.device.sl4f.gatts_lib.publishServer(database)
-
-    def close_gatt_server(self):
-        """ Closes an existing GATT server.
-        """
-        self.device.sl4f.gatts_lib.closeServer()
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """ Scan over LE for a specific device name.
-
-        Args:
-            name: The name filter to set.
-            timeout: The timeout to wait to find the advertisement.
-        Returns:
-            Discovered device id or None
-        """
-        partial_match = True
-        return le_scan_for_device_by_name(self.device, self.device.log, name,
-                                          timeout, partial_match)
-
-    def log_info(self, log):
-        """ Log directly onto the device.
-
-        Args:
-            log: The informative log.
-        """
-        self.device.sl4f.logging_lib.logI(log)
-
-    def unbond_all_known_devices(self):
-        """ Unbond all known remote devices.
-        """
-        try:
-            device_list = self.device.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for device_info in device_list:
-                device = device_list[device_info]
-                if device['bonded']:
-                    self.device.sl4f.bts_lib.forgetDevice(device['id'])
-        except Exception as err:
-            self.log.error("Unable to unbond all devices: {}".format(err))
-
-    def unbond_device(self, peer_identifier):
-        """ Unbond peer identifier.
-
-        Args:
-            peer_identifier: The peer identifier for the peer to unbond.
-
-        """
-        self.device.sl4f.bts_lib.forgetDevice(peer_identifier)
-
-    def _find_service_id_and_connect_to_service_for_handle(
-            self, peer_identifier, handle, uuid=False):
-        fail_err = "Failed to find handle {} in Peer database."
-        if uuid:
-            handle = handle.lower()
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    if uuid:
-                        char_id = char['uuid_type']
-                    if handle == char_id:
-                        return True
-                    descriptors = char['descriptors']
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        if uuid:
-                            desc_id = desc['uuid_type']
-                        if handle == desc_id:
-                            return True
-        except Exception as err:
-            self.log.error(fail_err.format(handle, err))
-            return False
-
-    def _read_all_characteristics(self, peer_identifier, uuid=None):
-        fail_err = "Failed to read all characteristics with: {}"
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-                self.log.info(
-                    "Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    if uuid and uuid.lower() not in char_uuid.lower():
-                        continue
-                    try:
-                        read_val =  \
-                            self.device.sl4f.gattc_lib.readCharacteristicById(
-                                char_id)
-                        self.log.info(
-                            "\tCharacteristic uuid / Value: {} / {}".format(
-                                char_uuid, read_val['result']))
-                        str_value = ""
-                        for val in read_val['result']:
-                            str_value += chr(val)
-                        self.log.info("\t\tstr val: {}".format(str_value))
-                    except Exception as err:
-                        self.log.error(err)
-        except Exception as err:
-            self.log.error(fail_err.format(err))
-
-    def _perform_read_all_descriptors(self, peer_identifier):
-        fail_err = "Failed to read all characteristics with: {}"
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-                self.log.info(
-                    "Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    self.log.info(
-                        "\tReading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                        try:
-                            read_val = self.device.sl4f.gattc_lib.readDescriptorById(
-                                desc_id)
-                            self.log.info(
-                                "\t\tDescriptor uuid / Value: {} / {}".format(
-                                    desc_uuid, read_val['result']))
-                        except Exception:
-                            pass
-        except Exception as err:
-            self.log.error(fail_err.format(err))
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """ Send an outgoing pairing request the input peer_identifier.
-
-        Android currently does not support setting various security levels or
-        bondable modes. Making them available for other bluetooth_device
-        variants. Depending on the Address type, Android will figure out the
-        transport to pair automatically.
-
-        Args:
-            peer_identifier: A string representing the device id.
-            security_level: The security level required for this pairing request
-                represented as a u64. (Only for LE pairing)
-                Available Values
-                1 - ENCRYPTED: Encrypted without MITM protection
-                    (unauthenticated)
-                2 - AUTHENTICATED: Encrypted with MITM protection
-                    (authenticated)
-                None: No pairing security level.
-            non_bondable: A bool representing whether the pairing mode is
-                bondable or not. None is also accepted. False if bondable, True
-                if non-bondable
-            transport: A u64 representing the transport type.
-                Available Values
-                1 - BREDR: Classic BR/EDR transport
-                2 - LE: Bluetooth Low Energy Transport
-        Returns:
-            True if successful, False if failed.
-        """
-        try:
-            self.device.sl4f.bts_lib.pair(peer_identifier, security_level,
-                                          non_bondable, transport)
-            return True
-        except Exception as err:
-            self.log.error(
-                "Failed to pair to peer_identifier {} with: {}".format(
-                    peer_identifier, err))
-            return False
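-
-
-# Illustrative pairing sketch for the Fuchsia wrapper (a sketch only;
-# "peer-id" and fuchsia_device are placeholders, and the numeric values
-# follow the init_pair docstring above):
-#
-#     bt = FuchsiaBluetoothDevice(fuchsia_device)
-#     bt.initialize_bluetooth_controller()
-#     bt.start_pairing_helper()
-#     # AUTHENTICATED (2) security over the LE transport (2), bondable.
-#     bt.init_pair("peer-id", security_level=2, non_bondable=False,
-#                  transport=2)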
diff --git a/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py b/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
deleted file mode 100644
index 2367b94..0000000
--- a/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
+++ /dev/null
@@ -1,340 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import inspect
-import time
-from antlion import asserts
-from antlion.controllers.buds_lib.dev_utils import apollo_sink_events
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-
-
-def validate_controller(controller, abstract_device_class):
-    """Ensure controller has all methods in abstract_device_class.
-    Also checks method signatures to ensure parameters are satisfied.
-
-    Args:
-        controller: instance of a device controller.
-        abstract_device_class: class definition of an abstract_device interface.
-    Raises:
-         NotImplementedError: if controller is missing one or more methods.
-    """
-    ctlr_methods = inspect.getmembers(controller, predicate=callable)
-    reqd_methods = inspect.getmembers(
-        abstract_device_class, predicate=inspect.ismethod)
-    expected_func_names = {method[0] for method in reqd_methods}
-    controller_func_names = {method[0] for method in ctlr_methods}
-
-    if not controller_func_names.issuperset(expected_func_names):
-        raise NotImplementedError(
-            'Controller {} is missing the following functions: {}'.format(
-                controller.__class__.__name__,
-                repr(expected_func_names - controller_func_names)))
-
-    for func_name in expected_func_names:
-        controller_func = getattr(controller, func_name)
-        required_func = getattr(abstract_device_class, func_name)
-        required_signature = inspect.signature(required_func)
-        if inspect.signature(controller_func) != required_signature:
-            raise NotImplementedError(
-                'Method {} must have the signature {}{}.'.format(
-                    controller_func.__qualname__, controller_func.__name__,
-                    required_signature))
-
-
-class BluetoothHandsfreeAbstractDevice:
-    """Base class for all Bluetooth handsfree abstract devices.
-
-    Desired controller classes should have a corresponding Bluetooth handsfree
-    abstract device class defined in this module.
-    """
-
-    @property
-    def mac_address(self):
-        raise NotImplementedError
-
-    def accept_call(self):
-        raise NotImplementedError()
-
-    def end_call(self):
-        raise NotImplementedError()
-
-    def enter_pairing_mode(self):
-        raise NotImplementedError()
-
-    def next_track(self):
-        raise NotImplementedError()
-
-    def pause(self):
-        raise NotImplementedError()
-
-    def play(self):
-        raise NotImplementedError()
-
-    def power_off(self):
-        raise NotImplementedError()
-
-    def power_on(self):
-        raise NotImplementedError()
-
-    def previous_track(self):
-        raise NotImplementedError()
-
-    def reject_call(self):
-        raise NotImplementedError()
-
-    def volume_down(self):
-        raise NotImplementedError()
-
-    def volume_up(self):
-        raise NotImplementedError()
-
-
-class PixelBudsBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-
-    CMD_EVENT = 'EvtHex'
-
-    def __init__(self, pixel_buds_controller):
-        self.pixel_buds_controller = pixel_buds_controller
-
-    def format_cmd(self, cmd_name):
-        return self.CMD_EVENT + ' ' + apollo_sink_events.SINK_EVENTS[cmd_name]
-
-    @property
-    def mac_address(self):
-        return self.pixel_buds_controller.bluetooth_address
-
-    def accept_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAnswer'))
-
-    def end_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrCancelEnd'))
-
-    def enter_pairing_mode(self):
-        return self.pixel_buds_controller.set_pairing_mode()
-
-    def next_track(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpSkipForward'))
-
-    def pause(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpPause'))
-
-    def play(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpPlay'))
-
-    def power_off(self):
-        return self.pixel_buds_controller.power('Off')
-
-    def power_on(self):
-        return self.pixel_buds_controller.power('On')
-
-    def previous_track(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpSkipBackward'))
-
-    def reject_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrReject'))
-
-    def volume_down(self):
-        return self.pixel_buds_controller.volume('Down')
-
-    def volume_up(self):
-        return self.pixel_buds_controller.volume('Up')
-
-
-class EarstudioReceiverBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, earstudio_controller):
-        self.earstudio_controller = earstudio_controller
-
-    @property
-    def mac_address(self):
-        return self.earstudio_controller.mac_address
-
-    def accept_call(self):
-        return self.earstudio_controller.press_accept_call()
-
-    def end_call(self):
-        return self.earstudio_controller.press_end_call()
-
-    def enter_pairing_mode(self):
-        return self.earstudio_controller.enter_pairing_mode()
-
-    def next_track(self):
-        return self.earstudio_controller.press_next()
-
-    def pause(self):
-        return self.earstudio_controller.press_play_pause()
-
-    def play(self):
-        return self.earstudio_controller.press_play_pause()
-
-    def power_off(self):
-        return self.earstudio_controller.power_off()
-
-    def power_on(self):
-        return self.earstudio_controller.power_on()
-
-    def previous_track(self):
-        return self.earstudio_controller.press_previous()
-
-    def reject_call(self):
-        return self.earstudio_controller.press_reject_call()
-
-    def volume_down(self):
-        return self.earstudio_controller.press_volume_down()
-
-    def volume_up(self):
-        return self.earstudio_controller.press_volume_up()
-
-
-class JaybirdX3EarbudsBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, jaybird_controller):
-        self.jaybird_controller = jaybird_controller
-
-    @property
-    def mac_address(self):
-        return self.jaybird_controller.mac_address
-
-    def accept_call(self):
-        return self.jaybird_controller.press_accept_call()
-
-    def end_call(self):
-        return self.jaybird_controller.press_reject_call()
-
-    def enter_pairing_mode(self):
-        return self.jaybird_controller.enter_pairing_mode()
-
-    def next_track(self):
-        return self.jaybird_controller.press_next()
-
-    def pause(self):
-        return self.jaybird_controller.press_play_pause()
-
-    def play(self):
-        return self.jaybird_controller.press_play_pause()
-
-    def power_off(self):
-        return self.jaybird_controller.power_off()
-
-    def power_on(self):
-        return self.jaybird_controller.power_on()
-
-    def previous_track(self):
-        return self.jaybird_controller.press_previous()
-
-    def reject_call(self):
-        return self.jaybird_controller.press_reject_call()
-
-    def volume_down(self):
-        return self.jaybird_controller.press_volume_down()
-
-    def volume_up(self):
-        return self.jaybird_controller.press_volume_up()
-
-
-class AndroidHeadsetBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, ad_controller):
-        self.ad_controller = ad_controller
-
-    @property
-    def mac_address(self):
-        """Getting device mac with more stability ensurance.
-
-        Sometime, getting mac address is flaky that it returns None. Adding a
-        loop to add more ensurance of getting correct mac address.
-        """
-        device_mac = None
-        start_time = time.time()
-        end_time = start_time + bt_default_timeout
-        while not device_mac and time.time() < end_time:
-            device_mac = self.ad_controller.droid.bluetoothGetLocalAddress()
-        asserts.assert_true(device_mac, 'Can not get the MAC address')
-        return device_mac
-
-    def accept_call(self):
-        return self.ad_controller.droid.telecomAcceptRingingCall(None)
-
-    def end_call(self):
-        return self.ad_controller.droid.telecomEndCall()
-
-    def enter_pairing_mode(self):
-        self.ad_controller.droid.bluetoothStartPairingHelper(True)
-        return self.ad_controller.droid.bluetoothMakeDiscoverable()
-
-    def next_track(self):
-        return (self.ad_controller.droid.bluetoothMediaPassthrough("skipNext"))
-
-    def pause(self):
-        return self.ad_controller.droid.bluetoothMediaPassthrough("pause")
-
-    def play(self):
-        return self.ad_controller.droid.bluetoothMediaPassthrough("play")
-
-    def power_off(self):
-        return self.ad_controller.droid.bluetoothToggleState(False)
-
-    def power_on(self):
-        return self.ad_controller.droid.bluetoothToggleState(True)
-
-    def previous_track(self):
-        return (self.ad_controller.droid.bluetoothMediaPassthrough("skipPrev"))
-
-    def reject_call(self):
-        return self.ad_controller.droid.telecomCallDisconnect(
-            self.ad_controller.droid.telecomCallGetCallIds()[0])
-
-    def reset(self):
-        return self.ad_controller.droid.bluetoothFactoryReset()
-
-    def volume_down(self):
-        target_step = self.ad_controller.droid.getMediaVolume() - 1
-        target_step = max(target_step, 0)
-        return self.ad_controller.droid.setMediaVolume(target_step)
-
-    def volume_up(self):
-        target_step = self.ad_controller.droid.getMediaVolume() + 1
-        max_step = self.ad_controller.droid.getMaxMediaVolume()
-        target_step = min(target_step, max_step)
-        return self.ad_controller.droid.setMediaVolume(target_step)
-
-
-class BluetoothHandsfreeAbstractDeviceFactory:
-    """Generates a BluetoothHandsfreeAbstractDevice for any device controller.
-    """
-
-    _controller_abstract_devices = {
-        'EarstudioReceiver': EarstudioReceiverBluetoothHandsfreeAbstractDevice,
-        'JaybirdX3Earbuds': JaybirdX3EarbudsBluetoothHandsfreeAbstractDevice,
-        'ParentDevice': PixelBudsBluetoothHandsfreeAbstractDevice,
-        'AndroidDevice': AndroidHeadsetBluetoothHandsfreeAbstractDevice
-    }
-
-    def generate(self, controller):
-        class_name = controller.__class__.__name__
-        if class_name in self._controller_abstract_devices:
-            return self._controller_abstract_devices[class_name](controller)
-        else:
-            validate_controller(controller, BluetoothHandsfreeAbstractDevice)
-            return controller
diff --git a/src/antlion/test_utils/abstract_devices/wlan_device.py b/src/antlion/test_utils/abstract_devices/wlan_device.py
index 2d11a44..5891012 100644
--- a/src/antlion/test_utils/abstract_devices/wlan_device.py
+++ b/src/antlion/test_utils/abstract_devices/wlan_device.py
@@ -16,15 +16,12 @@
 
 import inspect
 import logging
-import time
 
-import antlion.test_utils.wifi.wifi_test_utils as awutils
-from antlion.utils import adb_shell_ping
-
-from antlion import asserts
 from antlion.controllers import iperf_client
-from antlion.controllers.fuchsia_device import FuchsiaDevice
 from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.wifi import wifi_test_utils as awutils
+from antlion.utils import adb_shell_ping
 
 FUCHSIA_VALID_SECURITY_TYPES = {"none", "wep", "wpa", "wpa2", "wpa3"}
 
@@ -41,8 +38,9 @@
     elif isinstance(hardware_device, AndroidDevice):
         return AndroidWlanDevice(hardware_device)
     else:
-        raise ValueError('Unable to create WlanDevice for type %s' %
-                         type(hardware_device))
+        raise ValueError(
+            "Unable to create WlanDevice for type %s" % type(hardware_device)
+        )
 
 
 class WlanDevice(object):
@@ -64,121 +62,146 @@
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def reset_wifi(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def take_bug_report(self, test_name=None, begin_time=None):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def get_log(self, test_name, begin_time):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def turn_location_off_and_scan_toggle_off(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
+    def associate(
+        self,
+        target_ssid,
+        target_pwd=None,
+        check_connectivity=True,
+        hidden=False,
+        target_security=None,
+    ):
         """Base generic WLAN interface.  Only called if not overriden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def disconnect(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def get_wlan_interface_id_list(self):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def get_default_wlan_test_interface(self):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def destroy_wlan_interface(self, iface_id):
         """Base generic WLAN interface.  Only called if not overridden by
         another supported device.
         """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def send_command(self, command):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def is_connected(self, ssid=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+    def can_ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+    def ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def hard_power_cycle(self, pdus=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def save_network(self, ssid):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def clear_saved_networks(self):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def create_iperf_client(self, test_interface=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
     def feature_is_present(self, feature: str) -> bool:
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
+        raise NotImplementedError(
+            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
+        )
 
 
 class AndroidWlanDevice(WlanDevice):
@@ -210,13 +233,15 @@
     def turn_location_off_and_scan_toggle_off(self):
         awutils.turn_location_off_and_scan_toggle_off(self.device)
 
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  key_mgmt=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
+    def associate(
+        self,
+        target_ssid,
+        target_pwd=None,
+        key_mgmt=None,
+        check_connectivity=True,
+        hidden=False,
+        target_security=None,
+    ):
         """Function to associate an Android WLAN device.
 
         Args:
@@ -229,20 +254,21 @@
         Returns:
             True if successfully connected to WLAN, False if not.
         """
-        network = {'SSID': target_ssid, 'hiddenSSID': hidden}
+        network = {"SSID": target_ssid, "hiddenSSID": hidden}
         if target_pwd:
-            network['password'] = target_pwd
+            network["password"] = target_pwd
         if key_mgmt:
-            network['security'] = key_mgmt
+            network["security"] = key_mgmt
         try:
             awutils.connect_to_wifi_network(
                 self.device,
                 network,
                 check_connectivity=check_connectivity,
-                hidden=hidden)
+                hidden=hidden,
+            )
             return True
         except Exception as e:
-            self.device.log.info('Failed to associated (%s)' % e)
+            self.device.log.info("Failed to associate (%s)" % e)
             return False
 
     def disconnect(self):
@@ -252,7 +278,7 @@
         pass
 
     def get_default_wlan_test_interface(self):
-        return 'wlan0'
+        return "wlan0"
 
     def destroy_wlan_interface(self, iface_id):
         pass
@@ -263,20 +289,21 @@
     def is_connected(self, ssid=None):
         wifi_info = self.device.droid.wifiGetConnectionInfo()
         if ssid:
-            return 'BSSID' in wifi_info and wifi_info['SSID'] == ssid
-        return 'BSSID' in wifi_info
+            return "BSSID" in wifi_info and wifi_info["SSID"] == ssid
+        return "BSSID" in wifi_info
 
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        return adb_shell_ping(self.device,
-                              dest_ip=dest_ip,
-                              count=count,
-                              timeout=timeout)
+    def can_ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        return adb_shell_ping(
+            self.device, dest_ip=dest_ip, count=count, timeout=timeout
+        )
 
     def ping(self, dest_ip, count=3, interval=1000, timeout=1000, size=25):
         pass
@@ -291,7 +318,7 @@
         pass
 
     def create_iperf_client(self, test_interface=None):
-        """ Returns an iperf client on the Android, without requiring a
+        """Returns an iperf client on the Android, without requiring a
         specific config.
 
         Args:
@@ -304,8 +331,8 @@
             test_interface = self.get_default_wlan_test_interface()
 
         return iperf_client.IPerfClientOverAdb(
-            android_device_or_serial=self.device,
-            test_interface=test_interface)
+            android_device_or_serial=self.device, test_interface=test_interface
+        )
 
     def feature_is_present(self, feature: str) -> bool:
         pass
@@ -321,6 +348,8 @@
         fuchsia_device: A Fuchsia WLAN device.
     """
 
+    device: FuchsiaDevice
+
     def __init__(self, fuchsia_device):
         super().__init__(fuchsia_device)
         self.identifier = fuchsia_device.ip
@@ -342,13 +371,15 @@
     def turn_location_off_and_scan_toggle_off(self):
         """Stub for Fuchsia implementation."""
 
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  key_mgmt=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
+    def associate(
+        self,
+        target_ssid,
+        target_pwd=None,
+        key_mgmt=None,
+        check_connectivity=True,
+        hidden=False,
+        target_security=None,
+    ):
         """Function to associate a Fuchsia WLAN device.
 
         Args:
@@ -362,70 +393,81 @@
         Returns:
             True if successfully connected to WLAN, False if not.
         """
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             bss_scan_response = self.device.sl4f.wlan_lib.wlanScanForBSSInfo()
-            if bss_scan_response.get('error'):
-                self.log.error('Scan for BSS info failed. Err: %s' %
-                               bss_scan_response['error'])
+            if bss_scan_response.get("error"):
+                self.log.error(
+                    "Scan for BSS info failed. Err: %s" % bss_scan_response["error"]
+                )
                 return False
 
-            bss_descs_for_ssid = bss_scan_response['result'].get(
-                target_ssid, None)
+            bss_descs_for_ssid = bss_scan_response["result"].get(target_ssid, None)
             if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
                 self.log.error(
-                    'Scan failed to find a BSS description for target_ssid %s'
-                    % target_ssid)
+                    "Scan failed to find a BSS description for target_ssid %s"
+                    % target_ssid
+                )
                 return False
 
             connection_response = self.device.sl4f.wlan_lib.wlanConnectToNetwork(
-                target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd)
+                target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd
+            )
             return self.device.check_connect_response(connection_response)
         else:
             return self.device.wlan_policy_controller.save_and_connect(
-                target_ssid, target_security, password=target_pwd)
+                target_ssid, target_security, password=target_pwd
+            )
 
     def disconnect(self):
         """Function to disconnect from a Fuchsia WLAN device.
-           Asserts if disconnect was not successful.
+        Asserts if disconnect was not successful.
         """
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             disconnect_response = self.device.sl4f.wlan_lib.wlanDisconnect()
             return self.device.check_disconnect_response(disconnect_response)
         else:
-            return self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
+            return (
+                self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
             )
 
     def status(self):
         return self.device.sl4f.wlan_lib.wlanStatus()
 
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
+    def can_ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
         return self.device.can_ping(
             dest_ip,
             count=count,
             interval=interval,
             timeout=timeout,
             size=size,
-            additional_ping_params=additional_ping_params)
+            additional_ping_params=additional_ping_params,
+        )
 
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        return self.device.ping(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                additional_ping_params=additional_ping_params)
+    def ping(
+        self,
+        dest_ip,
+        count=3,
+        interval=1000,
+        timeout=1000,
+        size=25,
+        additional_ping_params=None,
+    ):
+        return self.device.ping(
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
 
     def get_wlan_interface_id_list(self):
         """Function to list available WLAN interfaces.
@@ -433,7 +475,7 @@
         Returns:
             A list of wlan interface IDs.
         """
-        return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get('result')
+        return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get("result")
 
     def get_default_wlan_test_interface(self):
         """Returns name of the WLAN client interface"""
@@ -451,18 +493,19 @@
             True if successfully destroyed wlan interface, False if not.
         """
         result = self.device.sl4f.wlan_lib.wlanDestroyIface(iface_id)
-        if result.get('error') is None:
+        if result.get("error") is None:
             return True
         else:
-            self.log.error("Failed to destroy interface with: {}".format(
-                result.get('error')))
+            self.log.error(
+                "Failed to destroy interface with: {}".format(result.get("error"))
+            )
             return False
 
     def send_command(self, command):
         return self.device.ssh.run(str(command)).stdout
 
     def is_connected(self, ssid=None):
-        """ Determines if wlan_device is connected to wlan network.
+        """Determines if wlan_device is connected to wlan network.
 
         Args:
             ssid (optional): string, to check if device is connect to a specific
@@ -475,16 +518,15 @@
                 provided.
         """
         response = self.status()
-        if response.get('error'):
-            raise ConnectionError(
-                'Failed to get client network connection status')
-        result = response.get('result')
+        if response.get("error"):
+            raise ConnectionError("Failed to get client network connection status")
+        result = response.get("result")
         if isinstance(result, dict):
-            connected_to = result.get('Connected')
+            connected_to = result.get("Connected")
             # TODO(https://fxbug.dev/85938): Remove backwards compatibility once
             # ACTS is versioned with Fuchsia.
             if not connected_to:
-                connected_to = result.get('connected_to')
+                connected_to = result.get("connected_to")
             if not connected_to:
                 return False
 
@@ -492,36 +534,40 @@
                 # Replace encoding errors instead of raising an exception.
                 # Since `ssid` is a string, this will not affect the test
                 # for equality.
-                connected_ssid = bytearray(connected_to['ssid']).decode(
-                    encoding='utf-8', errors='replace')
+                connected_ssid = bytearray(connected_to["ssid"]).decode(
+                    encoding="utf-8", errors="replace"
+                )
                 return ssid == connected_ssid
             return True
         return False
 
     def hard_power_cycle(self, pdus):
-        self.device.reboot(reboot_type='hard', testbed_pdus=pdus)
+        self.device.reboot(reboot_type="hard", testbed_pdus=pdus)
 
     def save_network(self, target_ssid, security_type=None, target_pwd=None):
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             raise EnvironmentError(
-                'Cannot save network using the drivers. Saved networks are a '
-                'policy layer concept.')
+                "Cannot save network using the drivers. Saved networks are a "
+                "policy layer concept."
+            )
         if security_type and security_type not in FUCHSIA_VALID_SECURITY_TYPES:
-            raise TypeError('Invalid security type: %s' % security_type)
+            raise TypeError("Invalid security type: %s" % security_type)
         if not self.device.wlan_policy_controller.save_network(
-                target_ssid, security_type, password=target_pwd):
-            raise EnvironmentError('Failed to save network: %s' % target_ssid)
+            target_ssid, security_type, password=target_pwd
+        ):
+            raise EnvironmentError("Failed to save network: %s" % target_ssid)
 
     def clear_saved_networks(self):
-        if self.device.association_mechanism == 'drivers':
+        if self.device.association_mechanism == "drivers":
             raise EnvironmentError(
-                'Cannot clear saved network using the drivers. Saved networks '
-                'are a policy layer concept.')
+                "Cannot clear saved network using the drivers. Saved networks "
+                "are a policy layer concept."
+            )
         if not self.device.wlan_policy_controller.remove_all_networks():
-            raise EnvironmentError('Failed to clear saved networks')
+            raise EnvironmentError("Failed to clear saved networks")
 
     def create_iperf_client(self, test_interface=None):
-        """ Returns an iperf client on the FuchsiaDevice, without requiring a
+        """Returns an iperf client on the FuchsiaDevice, without requiring a
         specific config.
 
         Args:
@@ -540,12 +586,13 @@
 
         return iperf_client.IPerfClientOverSsh(
             {
-                'user': 'fuchsia',
-                'host': self.device.ip,
-                'ssh_config': self.device.ssh_config
+                "user": "fuchsia",
+                "host": self.device.ip,
+                "ssh_config": self.device.ssh_config,
             },
             ssh_provider=self.device.ssh,
-            test_interface=test_interface)
+            test_interface=test_interface,
+        )
 
     def feature_is_present(self, feature: str) -> bool:
         return feature in self.device.wlan_features
diff --git a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py b/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
index c8b61cc..e38d91a 100644
--- a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
+++ b/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
@@ -27,22 +27,17 @@
 from antlion.controllers import iperf_client
 from antlion.controllers import iperf_server
 
-AC_VO = 'AC_VO'
-AC_VI = 'AC_VI'
-AC_BE = 'AC_BE'
-AC_BK = 'AC_BK'
+AC_VO = "AC_VO"
+AC_VI = "AC_VI"
+AC_BE = "AC_BE"
+AC_BK = "AC_BK"
 
 # TODO(fxb/61421): Add tests to check all DSCP classes are mapped to the correct
 # AC (there are many that aren't included here). Requires implementation of
 # sniffer.
-DEFAULT_AC_TO_TOS_TAG_MAP = {
-    AC_VO: '0xC0',
-    AC_VI: '0x80',
-    AC_BE: '0x0',
-    AC_BK: '0x20'
-}
-UDP = 'udp'
-TCP = 'tcp'
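+# Assumed mapping rationale: each TOS byte's upper six bits form the DSCP value
+# that 802.11/WMM buckets into the corresponding access category.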
+DEFAULT_AC_TO_TOS_TAG_MAP = {AC_VO: "0xC0", AC_VI: "0x80", AC_BE: "0x0", AC_BK: "0x20"}
+UDP = "udp"
+TCP = "tcp"
 DEFAULT_IPERF_PORT = 5201
 DEFAULT_STREAM_TIME = 10
 DEFAULT_IP_ADDR_TIMEOUT = 15
@@ -78,13 +73,13 @@
         # If identifier is not provided as func arg, it must be provided via
         # config file.
         if not identifier:
-            identifier = config['identifier']
-        iperf_config = config['iperf_config']
+            identifier = config["identifier"]
+        iperf_config = config["iperf_config"]
 
     except KeyError as err:
         raise WmmTransceiverError(
-            'Parameter not provided as func arg, nor found in config: %s' %
-            err)
+            "Parameter not provided as func arg, nor found in config: %s" % err
+        )
 
     if wlan_devices is None:
         wlan_devices = []
@@ -92,20 +87,22 @@
     if access_points is None:
         access_points = []
 
-    port_range_start = config.get('port_range_start', DEFAULT_IPERF_PORT)
+    port_range_start = config.get("port_range_start", DEFAULT_IPERF_PORT)
 
     wd = None
     ap = None
-    if 'wlan_device' in config:
-        wd = _find_wlan_device(config['wlan_device'], wlan_devices)
-    elif 'access_point' in config:
-        ap = _find_access_point(config['access_point'], access_points)
+    if "wlan_device" in config:
+        wd = _find_wlan_device(config["wlan_device"], wlan_devices)
+    elif "access_point" in config:
+        ap = _find_access_point(config["access_point"], access_points)
 
-    return WmmTransceiver(iperf_config,
-                          identifier,
-                          wlan_device=wd,
-                          access_point=ap,
-                          port_range_start=port_range_start)
+    return WmmTransceiver(
+        iperf_config,
+        identifier,
+        wlan_device=wd,
+        access_point=ap,
+        port_range_start=port_range_start,
+    )
 
 
 def _find_wlan_device(wlan_device_identifier, wlan_devices):
@@ -124,8 +121,9 @@
     for wd in wlan_devices:
         if wlan_device_identifier == wd.identifier:
             return wd
-    raise WmmTransceiverError('No WlanDevice with identifier: %s' %
-                              wlan_device_identifier)
+    raise WmmTransceiverError(
+        "No WlanDevice with identifier: %s" % wlan_device_identifier
+    )
 
 
 def _find_access_point(access_point_ip, access_points):
@@ -143,22 +141,26 @@
     for ap in access_points:
         if ap.ssh_settings.hostname == access_point_ip:
             return ap
-    raise WmmTransceiverError('No AccessPoint with ip: %s' % access_point_ip)
+    raise WmmTransceiverError("No AccessPoint with ip: %s" % access_point_ip)
 
 
 class WmmTransceiver(object):
     """Object for handling WMM tagged streams between devices"""
-    def __init__(self,
-                 iperf_config,
-                 identifier,
-                 wlan_device=None,
-                 access_point=None,
-                 port_range_start=5201):
 
+    def __init__(
+        self,
+        iperf_config,
+        identifier,
+        wlan_device=None,
+        access_point=None,
+        port_range_start=5201,
+    ):
         self.identifier = identifier
         self.log = tracelogger.TraceLogger(
-            WmmTransceiverLoggerAdapter(logging.getLogger(),
-                                        {'identifier': self.identifier}))
+            WmmTransceiverLoggerAdapter(
+                logging.getLogger(), {"identifier": self.identifier}
+            )
+        )
         # WlanDevice or AccessPoint that is used as the transceiver. Only one
         # will be set. This helps consolidate association, setup, teardown, etc.
         self.wlan_device = wlan_device
@@ -167,7 +169,7 @@
         # Parameters used to create IPerfClient and IPerfServer objects on
         # device
         self._iperf_config = iperf_config
-        self._test_interface = self._iperf_config.get('test_interface')
+        self._test_interface = self._iperf_config.get("test_interface")
         self._port_range_start = port_range_start
         self._next_server_port = port_range_start
 
@@ -224,26 +226,34 @@
         Returns:
             uuid: UUID object, identifier of the stream
         """
-        (receiver, access_category, bandwidth,
-         stream_time) = self._parse_stream_parameters(stream_parameters)
+        (
+            receiver,
+            access_category,
+            bandwidth,
+            stream_time,
+        ) = self._parse_stream_parameters(stream_parameters)
         uuid = uuid4()
 
-        (client, server_ip,
-         server_port) = self._get_stream_resources(uuid, receiver, subnet)
+        (client, server_ip, server_port) = self._get_stream_resources(
+            uuid, receiver, subnet
+        )
 
         self._validate_server_address(server_ip, uuid)
 
-        self.log.info('Running synchronous stream to %s WmmTransceiver' %
-                      receiver.identifier)
-        self._run_traffic(uuid,
-                          client,
-                          server_ip,
-                          server_port,
-                          self._active_streams,
-                          self._stream_results,
-                          access_category=access_category,
-                          bandwidth=bandwidth,
-                          stream_time=stream_time)
+        self.log.info(
+            "Running synchronous stream to %s WmmTransceiver" % receiver.identifier
+        )
+        self._run_traffic(
+            uuid,
+            client,
+            server_ip,
+            server_port,
+            self._active_streams,
+            self._stream_results,
+            access_category=access_category,
+            bandwidth=bandwidth,
+            stream_time=stream_time,
+        )
 
         self._return_stream_resources(uuid)
         return uuid
@@ -260,27 +270,28 @@
         Returns:
             uuid: UUID object, identifier of the stream
         """
-        (receiver, access_category, bandwidth,
-         time) = self._parse_stream_parameters(stream_parameters)
+        (receiver, access_category, bandwidth, time) = self._parse_stream_parameters(
+            stream_parameters
+        )
         uuid = uuid4()
 
-        (client, server_ip,
-         server_port) = self._get_stream_resources(uuid, receiver, subnet)
+        (client, server_ip, server_port) = self._get_stream_resources(
+            uuid, receiver, subnet
+        )
 
         self._validate_server_address(server_ip, uuid)
 
         pending_stream_config = {
-            'client': client,
-            'server_ip': server_ip,
-            'server_port': server_port,
-            'access_category': access_category,
-            'bandwidth': bandwidth,
-            'time': time
+            "client": client,
+            "server_ip": server_ip,
+            "server_port": server_port,
+            "access_category": access_category,
+            "bandwidth": bandwidth,
+            "time": time,
         }
 
         self._pending_async_streams[uuid] = pending_stream_config
-        self.log.info('Stream to %s WmmTransceiver prepared.' %
-                      receiver.identifier)
+        self.log.info("Stream to %s WmmTransceiver prepared." % receiver.identifier)
         return uuid
 
     def start_asynchronous_streams(self, start_time=None):
@@ -293,27 +304,30 @@
         """
         for uuid in self._pending_async_streams:
             pending_stream_config = self._pending_async_streams[uuid]
-            client = pending_stream_config['client']
-            server_ip = pending_stream_config['server_ip']
-            server_port = pending_stream_config['server_port']
-            access_category = pending_stream_config['access_category']
-            bandwidth = pending_stream_config['bandwidth']
-            time = pending_stream_config['time']
+            client = pending_stream_config["client"]
+            server_ip = pending_stream_config["server_ip"]
+            server_port = pending_stream_config["server_port"]
+            access_category = pending_stream_config["access_category"]
+            bandwidth = pending_stream_config["bandwidth"]
+            time = pending_stream_config["time"]
 
-            process = multiprocessing.Process(target=self._run_traffic,
-                                              args=[
-                                                  uuid, client, server_ip,
-                                                  server_port,
-                                                  self._active_streams,
-                                                  self._stream_results
-                                              ],
-                                              kwargs={
-                                                  'access_category':
-                                                  access_category,
-                                                  'bandwidth': bandwidth,
-                                                  'stream_time': time,
-                                                  'start_time': start_time
-                                              })
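+            # Each pending stream gets its own process so all iperf clients can
+            # transmit concurrently once started.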
+            process = multiprocessing.Process(
+                target=self._run_traffic,
+                args=[
+                    uuid,
+                    client,
+                    server_ip,
+                    server_port,
+                    self._active_streams,
+                    self._stream_results,
+                ],
+                kwargs={
+                    "access_category": access_category,
+                    "bandwidth": bandwidth,
+                    "stream_time": time,
+                    "start_time": start_time,
+                },
+            )
 
             # This needs to be set here to ensure it's marked active before
             # it even starts.
@@ -334,12 +348,11 @@
             timeout: time, in seconds, to wait for each running process, if any,
                 to join
         """
-        self.log.info('Cleaning up any asynchronous streams.')
+        self.log.info("Cleaning up any asynchronous streams.")
 
         # Releases resources for any streams that were prepared, but not run
         for uuid in self._pending_async_streams:
-            self.log.error(
-                'Pending asynchronous stream %s never ran. Cleaning.' % uuid)
+            self.log.error("Pending asynchronous stream %s never ran. Cleaning." % uuid)
             self._return_stream_resources(uuid)
         self._pending_async_streams.clear()
 
@@ -350,8 +363,9 @@
             process.join(timeout)
             if process.is_alive():
                 self.log.error(
-                    'Stream process failed to join in %s seconds. Terminating.'
-                    % timeout)
+                    "Stream process failed to join in %s seconds. Terminating."
+                    % timeout
+                )
                 process.terminate()
                 process.join()
         self._active_streams.clear()
@@ -384,17 +398,19 @@
 
     # Helper Functions
 
-    def _run_traffic(self,
-                     uuid,
-                     client,
-                     server_ip,
-                     server_port,
-                     active_streams,
-                     stream_results,
-                     access_category=None,
-                     bandwidth=None,
-                     stream_time=DEFAULT_STREAM_TIME,
-                     start_time=None):
+    def _run_traffic(
+        self,
+        uuid,
+        client,
+        server_ip,
+        server_port,
+        active_streams,
+        stream_results,
+        access_category=None,
+        bandwidth=None,
+        stream_time=DEFAULT_STREAM_TIME,
+        start_time=None,
+    ):
         """Runs an iperf3 stream.
 
         1. Adds stream UUID to active_streams
@@ -426,34 +442,46 @@
             with utils.SuppressLogOutput():
                 client.start_ssh()
 
-        ac_flag = ''
-        bandwidth_flag = ''
-        time_flag = '-t %s' % stream_time
+        ac_flag = ""
+        bandwidth_flag = ""
+        time_flag = "-t %s" % stream_time
 
         if access_category:
-            ac_flag = ' -S %s' % DEFAULT_AC_TO_TOS_TAG_MAP[access_category]
+            ac_flag = " -S %s" % DEFAULT_AC_TO_TOS_TAG_MAP[access_category]
 
         if bandwidth:
-            bandwidth_flag = ' -u -b %sM' % bandwidth
+            bandwidth_flag = " -u -b %sM" % bandwidth
 
-        iperf_flags = '-p %s -i 1 %s%s%s -J' % (server_port, time_flag,
-                                                ac_flag, bandwidth_flag)
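+        # iperf3 flags: -p <port>, -i 1 (report every second), -t <seconds>,
+        # optional -S <TOS> for WMM tagging and -u -b <Mbit/s> for UDP bandwidth,
+        # -J for JSON output.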
+        iperf_flags = "-p %s -i 1 %s%s%s -J" % (
+            server_port,
+            time_flag,
+            ac_flag,
+            bandwidth_flag,
+        )
         if not start_time:
             start_time = time.time()
-        time_str = datetime.fromtimestamp(start_time).strftime('%H:%M:%S.%f')
+        time_str = datetime.fromtimestamp(start_time).strftime("%H:%M:%S.%f")
         self.log.info(
-            'At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)'
-            % (time_str, stream_time, server_ip, server_port, access_category,
-               bandwidth if bandwidth else 'Unlimited'))
+            "At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)"
+            % (
+                time_str,
+                stream_time,
+                server_ip,
+                server_port,
+                access_category,
+                bandwidth if bandwidth else "Unlimited",
+            )
+        )
 
         # If present, wait for stream start time
         if start_time:
             current_time = time.time()
             while current_time < start_time:
                 current_time = time.time()
-        path = client.start(server_ip, iperf_flags, '%s' % uuid)
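+        # client.start() runs iperf3 against the server and returns the path to
+        # its output log, which IPerfResult parses below.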
+        path = client.start(server_ip, iperf_flags, "%s" % uuid)
         stream_results[uuid] = iperf_server.IPerfResult(
-            path, reporting_speed_units='mbps')
+            path, reporting_speed_units="mbps"
+        )
 
         if type(client) == iperf_client.IPerfClientOverSsh:
             client.close_ssh()
@@ -484,8 +512,7 @@
             uuid: UUID object, identifier of the stream
         """
         if uuid in self._active_streams:
-            raise EnvironmentError('Resource still being used by stream %s' %
-                                   uuid)
+            raise EnvironmentError("Resource still being used by stream %s" % uuid)
         (receiver, server_port) = self._reserved_servers.pop(uuid)
         receiver._release_server(server_port)
         client = self._reserved_clients.pop(uuid)
@@ -555,7 +582,7 @@
                 break
         else:
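+            # for/else: reached only when the loop above finishes without
+            # reserving an existing server, so create one on the next port.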
             iperf_server_config = self._iperf_config
-            iperf_server_config.update({'port': self._next_server_port})
+            iperf_server_config.update({"port": self._next_server_port})
             self._next_server_port += 1
             reserved_server = iperf_server.create([iperf_server_config])[0]
             self._iperf_server_ports[reserved_server.port] = reserved_server
@@ -566,15 +593,18 @@
         while time.time() < end_time:
             if self.wlan_device:
                 addresses = utils.get_interface_ip_addresses(
-                    self.wlan_device.device, self._test_interface)
+                    self.wlan_device.device, self._test_interface
+                )
             else:
                 addresses = reserved_server.get_interface_ip_addresses(
-                    self._test_interface)
-            for addr in addresses['ipv4_private']:
+                    self._test_interface
+                )
+            for addr in addresses["ipv4_private"]:
                 if utils.ip_in_subnet(addr, subnet):
                     return (addr, reserved_server.port)
         raise AttributeError(
-            'Reserved server has no ipv4 address in the %s subnet' % subnet)
+            "Reserved server has no ipv4 address in the %s subnet" % subnet
+        )
 
     def _release_server(self, server_port):
         """Releases reservation on IPerfServer, which was held for a stream
@@ -589,7 +619,7 @@
         self._iperf_servers[server] = AVAILABLE
 
     def _validate_server_address(self, server_ip, uuid, timeout=60):
-        """ Verifies server address can be pinged before attempting to run
+        """Verifies server address can be pinged before attempting to run
         traffic, since iperf is unforgiving when the server is unreachable.
 
         Args:
@@ -600,24 +630,25 @@
         Raises:
             WmmTransceiverError, if, after timeout, server ip is unreachable.
         """
-        self.log.info('Verifying server address (%s) is reachable.' %
-                      server_ip)
+        self.log.info("Verifying server address (%s) is reachable." % server_ip)
         end_time = time.time() + timeout
         while time.time() < end_time:
             if self.can_ping(server_ip):
                 break
             else:
                 self.log.debug(
-                    'Could not ping server address (%s). Retrying in 1 second.'
-                    % (server_ip))
+                    "Could not ping server address (%s). Retrying in 1 second."
+                    % (server_ip)
+                )
                 time.sleep(1)
         else:
             self._return_stream_resources(uuid)
-            raise WmmTransceiverError('IPerfServer address (%s) unreachable.' %
-                                      server_ip)
+            raise WmmTransceiverError(
+                "IPerfServer address (%s) unreachable." % server_ip
+            )
 
     def can_ping(self, dest_ip):
-        """ Utilizes can_ping function in wlan_device or access_point device to
+        """Utilizes can_ping function in wlan_device or access_point device to
         ping dest_ip
 
         Args:
@@ -648,18 +679,18 @@
             (receiver, access_category, bandwidth, time) as
             (WmmTransceiver, String, int, int)
         """
-        receiver = stream_parameters['receiver']
-        access_category = stream_parameters.get('access_category', None)
-        bandwidth = stream_parameters.get('bandwidth', None)
-        time = stream_parameters.get('time', DEFAULT_STREAM_TIME)
+        receiver = stream_parameters["receiver"]
+        access_category = stream_parameters.get("access_category", None)
+        bandwidth = stream_parameters.get("bandwidth", None)
+        time = stream_parameters.get("time", DEFAULT_STREAM_TIME)
         return (receiver, access_category, bandwidth, time)
 
 
 class WmmTransceiverLoggerAdapter(logging.LoggerAdapter):
     def process(self, msg, kwargs):
-        if self.extra['identifier']:
-            log_identifier = ' | %s' % self.extra['identifier']
+        if self.extra["identifier"]:
+            log_identifier = " | %s" % self.extra["identifier"]
         else:
-            log_identifier = ''
+            log_identifier = ""
         msg = "[WmmTransceiver%s] %s" % (log_identifier, msg)
         return (msg, kwargs)
diff --git a/src/antlion/test_utils/audio_analysis_lib/__init__.py b/src/antlion/test_utils/audio_analysis_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py b/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py
deleted file mode 100644
index a4273c5..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py
+++ /dev/null
@@ -1,669 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides utilities to do audio data analysis."""
-
-import logging
-import numpy
-import soundfile
-from scipy.signal import blackmanharris
-from scipy.signal import iirnotch
-from scipy.signal import lfilter
-
-# The default block size of pattern matching.
-ANOMALY_DETECTION_BLOCK_SIZE = 120
-
-# Only peaks with coefficient greater than 0.01 of the first peak should be
-# considered. Note that this correspond to -40dB in the spectrum.
-DEFAULT_MIN_PEAK_RATIO = 0.01
-
-# The minimum RMS value of meaningful audio data.
-MEANINGFUL_RMS_THRESHOLD = 0.001
-
-# The minimal signal norm value.
-_MINIMUM_SIGNAL_NORM = 0.001
-
-# The default pattern mathing threshold. By experiment, this threshold
-# can tolerate normal noise of 0.3 amplitude when sine wave signal
-# amplitude is 1.
-PATTERN_MATCHING_THRESHOLD = 0.85
-
-# The default number of samples within the analysis step size that the
-# difference between two anomaly time values can be to be grouped together.
-ANOMALY_GROUPING_TOLERANCE = 1.0
-
-# Window size for peak detection.
-PEAK_WINDOW_SIZE_HZ = 20
-
-
-class RMSTooSmallError(Exception):
-    """Error when signal RMS is too small."""
-
-
-class EmptyDataError(Exception):
-    """Error when signal is empty."""
-
-
-def normalize_signal(signal, saturate_value):
-    """Normalizes the signal with respect to the saturate value.
-
-    Args:
-        signal: A list for one-channel PCM data.
-        saturate_value: The maximum value that the PCM data might be.
-
-    Returns:
-        A numpy array containing normalized signal. The normalized signal has
-            value -1 and 1 when it saturates.
-
-    """
-    signal = numpy.array(signal)
-    return signal / float(saturate_value)
-
-
-def spectral_analysis(signal,
-                      rate,
-                      min_peak_ratio=DEFAULT_MIN_PEAK_RATIO,
-                      peak_window_size_hz=PEAK_WINDOW_SIZE_HZ):
-    """Gets the dominant frequencies by spectral analysis.
-
-    Args:
-        signal: A list of numbers for one-channel PCM data. This should be
-                   normalized to [-1, 1] so the function can check if signal RMS
-                   is too small to be meaningful.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        min_peak_ratio: The minimum peak_i/peak_0 ratio such that the
-                           peaks other than the greatest one should be
-                           considered.
-                           This is to ignore peaks that are too small compared
-                           to the first peak peak_0.
-        peak_window_size_hz: The window size in Hz to find the peaks.
-                                The minimum differences between found peaks will
-                                be half of this value.
-
-    Returns:
-        A list of tuples:
-              [(peak_frequency_0, peak_coefficient_0),
-               (peak_frequency_1, peak_coefficient_1),
-               (peak_frequency_2, peak_coefficient_2), ...]
-              where the tuples are sorted by coefficient in descending order.
-              The last peak_coefficient will be no less than
-              peak_coefficient_0 * min_peak_ratio. If RMS is less than
-              MEANINGFUL_RMS_THRESHOLD,
-              returns [(0, 0)].
-
-    """
-    # Checks the signal is meaningful.
-    if len(signal) == 0:
-        raise EmptyDataError('Signal data is empty')
-
-    signal_rms = numpy.linalg.norm(signal) / numpy.sqrt(len(signal))
-    logging.debug('signal RMS = %s', signal_rms)
-
-    # If RMS is too small, set dominant frequency and coefficient to 0.
-    if signal_rms < MEANINGFUL_RMS_THRESHOLD:
-        logging.warning(
-            'RMS %s is too small to be meaningful. Set frequency to 0.',
-            signal_rms)
-        return [(0, 0)]
-
-    logging.debug('Doing spectral analysis ...')
-
-    # First, pass signal through a window function to mitigate spectral leakage.
-    y_conv_w = signal * numpy.hanning(len(signal))
-
-    length = len(y_conv_w)
-
-    # x_f is the frequency in Hz, y_f is the transformed coefficient.
-    x_f = _rfft_freq(length, rate)
-    y_f = 2.0 / length * numpy.fft.rfft(y_conv_w)
-
-    # y_f is complex so consider its absolute value for magnitude.
-    abs_y_f = numpy.abs(y_f)
-    threshold = max(abs_y_f) * min_peak_ratio
-
-    # Suppresses all coefficients that are below threshold.
-    for i in range(len(abs_y_f)):
-        if abs_y_f[i] < threshold:
-            abs_y_f[i] = 0
-
-    # Gets the peak detection window size in indices.
-    # x_f[1] is the frequency difference per index.
-    peak_window_size = int(peak_window_size_hz / x_f[1])
-
-    # Detects peaks.
-    peaks = peak_detection(abs_y_f, peak_window_size)
-
-    # Transform back the peak location from index to frequency.
-    results = []
-    for index, value in peaks:
-        results.append((x_f[int(index)], value))
-    return results
-
-
-def _rfft_freq(length, rate):
-    """Gets the frequency at each index of real FFT.
-
-    Args:
-        length: The window length of FFT.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-
-    Returns:
-        A numpy array containing frequency corresponding to numpy.fft.rfft
-            result at each index.
-
-    """
-    # The difference in Hz between each index.
-    val = rate / float(length)
-    # Only care half of frequencies for FFT on real signal.
-    result_length = length // 2 + 1
-    return numpy.linspace(0, (result_length - 1) * val, result_length)
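
# Illustrative cross-check (not part of the original module): numpy's built-in
# helper computes the same frequency grid, e.g.
#
#     assert numpy.allclose(_rfft_freq(8, 8000),
#                           numpy.fft.rfftfreq(8, d=1.0 / 8000))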
-
-
-def peak_detection(array, window_size):
-    """Detects peaks in an array.
-
-    A point (i, array[i]) is a peak if array[i] is the maximum among
-    array[i - half_window_size] to array[i + half_window_size].
-    If array[i - half_window_size] to array[i + half_window_size] are all equal,
-    then there is no peak in this window.
-    Note that we only consider peak with value greater than 0.
-
-    Args:
-        array: The input array to detect peaks in. Array is a list of
-        absolute values of the magnitude of transformed coefficient.
-
-        window_size: The window to detect peaks.
-
-    Returns:
-        A list of tuples:
-              [(peak_index_1, peak_value_1), (peak_index_2, peak_value_2), ...]
-              where the tuples are sorted by peak values.
-
-    """
-    half_window_size = window_size / 2
-    length = len(array)
-
-    def mid_is_peak(array, mid, left, right):
-        """Checks if value at mid is the largest among left to right in array.
-
-        Args:
-            array: A list of numbers.
-            mid: The mid index.
-            left: The left index.
-            right: The right index.
-
-        Returns:
-            A tuple (is_peak, next_candidate)
-                  is_peak is True if array[mid] is the maximum among numbers
-                  in array between indices [left, right] inclusively.
-                  next_candidate is the index of next candidate for peak if
-                  is_peak is False. It is the index of maximum value in
-                  [mid + 1, right]. If is_peak is True, next_candidate is
-                  right + 1.
-
-        """
-        value_mid = array[int(mid)]
-        is_peak = True
-        next_peak_candidate_index = None
-
-        # Check the left half window.
-        for index in range(int(left), int(mid)):
-            if array[index] >= value_mid:
-                is_peak = False
-                break
-
-        # Mid is at the end of array.
-        if mid == right:
-            return is_peak, right + 1
-
-        # Check the right half window and also record next candidate.
-        # Favor the larger index for next_peak_candidate_index.
-        for index in range(int(right), int(mid), -1):
-            if (next_peak_candidate_index is None
-                    or array[index] > array[next_peak_candidate_index]):
-                next_peak_candidate_index = index
-
-        if array[next_peak_candidate_index] >= value_mid:
-            is_peak = False
-
-        if is_peak:
-            next_peak_candidate_index = right + 1
-
-        return is_peak, next_peak_candidate_index
-
-    results = []
-    mid = 0
-    next_candidate_idx = None
-    while mid < length:
-        left = max(0, mid - half_window_size)
-        right = min(length - 1, mid + half_window_size)
-
-        # Only consider value greater than 0.
-        if array[int(mid)] == 0:
-            mid = mid + 1
-            continue
-
-        is_peak, next_candidate_idx = mid_is_peak(array, mid, left, right)
-
-        if is_peak:
-            results.append((mid, array[int(mid)]))
-
-        # Use the next candidate found in [mid + 1, right], or right + 1.
-        mid = next_candidate_idx
-
-    # Sort the peaks by values.
-    return sorted(results, key=lambda x: x[1], reverse=True)
-
-
-def anomaly_detection(signal,
-                      rate,
-                      freq,
-                      block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                      threshold=PATTERN_MATCHING_THRESHOLD):
-    """Detects anomaly in a sine wave signal.
-
-    This method detects anomaly in a sine wave signal by matching
-    patterns of each block.
-    For each moving window of block in the test signal, checks if there
-    is any block in golden signal that is similar to this block of test signal.
-    If there is such a block in golden signal, then this block of test
-    signal is matched and there is no anomaly in this block of test signal.
-    If there is any block in test signal that is not matched, then this block
-    covers an anomaly.
-    The block of test signal starts from index 0, and proceeds in steps of
-    half block size. The overlapping of test signal blocks makes sure there must
-    be at least one block covering the transition from sine wave to anomaly.
-
-    Args:
-        signal: A 1-D array-like object for 1-channel PCM data.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        freq: The expected frequency of signal.
-        block_size: The block size in samples to detect anomaly.
-        threshold: The threshold of correlation index to be judged as matched.
-
-    Returns:
-        A list containing time markers in seconds that have an anomaly within
-            block_size samples.
-
-    """
-    if len(signal) == 0:
-        raise EmptyDataError('Signal data is empty')
-
-    golden_y = _generate_golden_pattern(rate, freq, block_size)
-
-    results = []
-
-    for start in range(0, len(signal), int(block_size / 2)):
-        end = start + block_size
-        test_signal = signal[start:end]
-        matched = _moving_pattern_matching(golden_y, test_signal, threshold)
-        if not matched:
-            results.append(start)
-
-    results = [float(x) / rate for x in results]
-
-    return results
-
-
-def get_anomaly_durations(signal,
-                          rate,
-                          freq,
-                          block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                          threshold=PATTERN_MATCHING_THRESHOLD,
-                          tolerance=ANOMALY_GROUPING_TOLERANCE):
-    """Detect anomalies in a sine wav and return their start and end times.
-
-    Run anomaly_detection function and parse resulting array of time values into
-    discrete anomalies defined by a start and end time tuple. Time values are
-    judged to be part of the same anomaly if they lie within a given tolerance
-    of half the block_size number of samples of each other.
-
-    Args:
-        signal: A 1-D array-like object for 1-channel PCM data.
-        rate (int): Sampling rate in samples per second.
-            Example inputs: 44100, 48000
-        freq (int): The expected frequency of signal.
-        block_size (int): The block size in samples to detect anomaly.
-        threshold (float): The threshold of correlation index to be judged as
-            matched.
-        tolerance (float): The number of samples beyond block_size / 2 by which
-            the sample distance between two anomaly time values may differ and
-            still be grouped into the same anomaly.
-    Returns:
-        bounds (list): a list of (start, end) tuples where start and end are the
-            boundaries in seconds of the detected anomaly.
-    """
-    bounds = []
-    anoms = anomaly_detection(signal, rate, freq, block_size, threshold)
-    if len(anoms) == 0:
-        return bounds
-    end = anoms[0]
-    start = anoms[0]
-    for i in range(len(anoms) - 1):
-        end = anoms[i]
-        sample_diff = abs(anoms[i] - anoms[i + 1]) * rate
-        # We require a tolerance because sample_diff may be slightly off due to
-        # float rounding errors in Python.
-        if sample_diff > block_size / 2 + tolerance:
-            bounds.append((start, end))
-            start = anoms[i + 1]
-    bounds.append((start, end))
-    return bounds
-
-
-def _generate_golden_pattern(rate, freq, block_size):
-    """Generates a golden pattern of certain frequency.
-
-    The golden pattern must cover all the possibilities of waveforms in a
-    block. So, we need a golden pattern covering 1 period + 1 block size,
-    such that the test block can start anywhere in a period, and extends
-    a block size.
-
-    |period |1 bk|
-    |       |    |
-     . .     . .
-    .   .   .   .
-         . .     .
-
-    Args:
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        freq: The frequency of golden pattern.
-        block_size: The block size in samples to detect anomaly.
-
-    Returns:
-        A 1-D array for golden pattern.
-
-    """
-    samples_in_a_period = int(rate / freq) + 1
-    samples_in_golden_pattern = samples_in_a_period + block_size
-    golden_x = numpy.linspace(0.0,
-                              (samples_in_golden_pattern - 1) * 1.0 / rate,
-                              samples_in_golden_pattern)
-    golden_y = numpy.sin(freq * 2.0 * numpy.pi * golden_x)
-    return golden_y
-
-
-def _moving_pattern_matching(golden_signal, test_signal, threshold):
-    """Checks if test_signal is similar to any block of golden_signal.
-
-    Compares test signal with each block of golden signal by correlation
-    index. If there is any block of golden signal that is similar to
-    test signal, then it is matched.
-
-    Args:
-        golden_signal: A 1-D array for golden signal.
-        test_signal: A 1-D array for test signal.
-        threshold: The threshold of correlation index to be judged as matched.
-
-    Returns:
-        True if there is a match. False otherwise.
-
-    Raises:
-        ValueError: if test signal is longer than golden signal.
-
-    """
-    if len(golden_signal) < len(test_signal):
-        raise ValueError('Test signal is longer than golden signal')
-
-    block_length = len(test_signal)
-    number_of_movings = len(golden_signal) - block_length + 1
-    correlation_indices = []
-    for moving_index in range(number_of_movings):
-        # Cuts one block of golden signal from start index.
-        # The block length is the same as test signal.
-        start = moving_index
-        end = start + block_length
-        golden_signal_block = golden_signal[start:end]
-        try:
-            correlation_index = _get_correlation_index(golden_signal_block,
-                                                       test_signal)
-        except TestSignalNormTooSmallError:
-            logging.info(
-                'Caught one block of test signal that has no meaningful norm')
-            return False
-        correlation_indices.append(correlation_index)
-
-    # Checks if the maximum correlation index is high enough.
-    max_corr = max(correlation_indices)
-    if max_corr < threshold:
-        logging.debug('Got one unmatched block with max_corr: %s', max_corr)
-        return False
-    return True
-
-
-class GoldenSignalNormTooSmallError(Exception):
-    """Exception when golden signal norm is too small."""
-
-
-class TestSignalNormTooSmallError(Exception):
-    """Exception when test signal norm is too small."""
-
-
-def _get_correlation_index(golden_signal, test_signal):
-    """Computes correlation index of two signal of same length.
-
-    Args:
-        golden_signal: An 1-D array-like object.
-        test_signal: An 1-D array-like object.
-
-    Raises:
-        ValueError: if two signal have different lengths.
-        GoldenSignalNormTooSmallError: if golden signal norm is too small
-        TestSignalNormTooSmallError: if test signal norm is too small.
-
-    Returns:
-        The correlation index.
-    """
-    if len(golden_signal) != len(test_signal):
-        raise ValueError('Only accepts signal of same length: %s, %s' %
-                         (len(golden_signal), len(test_signal)))
-
-    norm_golden = numpy.linalg.norm(golden_signal)
-    norm_test = numpy.linalg.norm(test_signal)
-    if norm_golden <= _MINIMUM_SIGNAL_NORM:
-        raise GoldenSignalNormTooSmallError(
-            'No meaningful data as norm is too small.')
-    if norm_test <= _MINIMUM_SIGNAL_NORM:
-        raise TestSignalNormTooSmallError(
-            'No meaningful data as norm is too small.')
-
-    # The 'valid' cross correlation result of two signals of same length will
-    # contain only one number.
-    correlation = numpy.correlate(golden_signal, test_signal, 'valid')[0]
-    return correlation / (norm_golden * norm_test)
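
# Sanity check, illustrative only (not from the original file): two identical
# signals with a non-trivial norm should give a correlation index of ~1.0.
#
#     sig = numpy.sin(numpy.linspace(0, 2 * numpy.pi, 100))
#     assert abs(_get_correlation_index(sig, sig) - 1.0) < 1e-6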
-
-
-def fundamental_freq(signal, rate):
-    """Return fundamental frequency of signal by finding max in freq domain.
-    """
-    dft = numpy.fft.rfft(signal)
-    fund_freq = rate * (numpy.argmax(numpy.abs(dft)) / len(signal))
-    return fund_freq
-
-
-def rms(array):
-    """Return the root mean square of array.
-    """
-    return numpy.sqrt(numpy.mean(numpy.absolute(array)**2))
-
-
-def THDN(signal, rate, q, freq):
-    """Measure the THD+N for a signal and return the results.
-    Subtract the mean to center the signal around 0, remove the fundamental
-    frequency with a notch filter to isolate the noise, then compute the ratio
-    of the RMS of the noise signal to the RMS of the entire (windowed) signal.
-
-    Args:
-        signal: array of values representing an audio signal.
-        rate: sample rate in Hz of the signal.
-        q: quality factor for the notch filter.
-        freq: fundamental frequency of the signal. All other frequencies
-            are noise. If not specified, will be calculated using FFT.
-    Returns:
-        THDN: THD+N ratio calculated from the ratio of RMS of pure harmonics
-            and noise signal to RMS of original signal.
-    """
-    # Normalize and window signal.
-    signal -= numpy.mean(signal)
-    windowed = signal * blackmanharris(len(signal))
-    # Find fundamental frequency to remove if not specified.
-    freq = freq or fundamental_freq(windowed, rate)
-    # Create notch filter to isolate noise.
-    w0 = freq / (rate / 2.0)
-    b, a = iirnotch(w0, q)
-    noise = lfilter(b, a, windowed)
-    # Calculate THD+N.
-    THDN = rms(noise) / rms(windowed)
-    return THDN
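
# Rough usage sketch, illustrative only (not from the original file): a pure
# 1 kHz tone with a little added noise should yield a small THD+N ratio; the
# exact value depends on q and on windowing leakage.
#
#     rate = 48000
#     t = numpy.arange(rate) / rate
#     tone = numpy.sin(2 * numpy.pi * 1000 * t)
#     noisy = tone + 0.001 * numpy.random.standard_normal(rate)
#     ratio = THDN(noisy, rate, q=10, freq=1000)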
-
-
-def max_THDN(signal, rate, step_size, window_size, q, freq):
-    """Analyze signal with moving window and find maximum THD+N value.
-    Args:
-        signal: array representing the signal
-        rate: sample rate of the signal.
-        step_size: how many samples to move the window by for each analysis.
-        window_size: how many samples to analyze each time.
-        q: quality factor for the notch filter.
-        freq: fundamental frequency of the signal. All other frequencies
-            are noise. If not specified, will be calculated using FFT.
-    Returns:
-        greatest_THDN: the greatest THD+N value found across all windows
-    """
-    greatest_THDN = 0
-    cur = 0
-    while cur + window_size < len(signal):
-        window = signal[cur:cur + window_size]
-        res = THDN(window, rate, q, freq)
-        cur += step_size
-        if res > greatest_THDN:
-            greatest_THDN = res
-    return greatest_THDN
-
-
-def get_file_THDN(filename, q, freq=None):
-    """Get THD+N values for each channel of an audio file.
-
-    Args:
-        filename (str): path to the audio file.
-          (supported file types: http://www.mega-nerd.com/libsndfile/#Features)
-        q (float): quality factor for the notch filter.
-        freq (int|float): fundamental frequency of the signal. All other
-            frequencies are noise. If None, will be calculated with FFT.
-    Returns:
-        channel_results (list): THD+N value for each channel's signal.
-            List index corresponds to channel index.
-    """
-    audio_file = soundfile.SoundFile(filename)
-    channel_results = []
-    if audio_file.channels == 1:
-        channel_results.append(
-            THDN(signal=audio_file.read(),
-                 rate=audio_file.samplerate,
-                 q=q,
-                 freq=freq))
-    else:
-        for ch_no, channel in enumerate(audio_file.read().transpose()):
-            channel_results.append(
-                THDN(signal=channel,
-                     rate=audio_file.samplerate,
-                     q=q,
-                     freq=freq))
-    return channel_results
-
-
-def get_file_max_THDN(filename, step_size, window_size, q, freq=None):
-    """Get max THD+N value across analysis windows for each channel of file.
-
-    Args:
-        filename (str): path to the audio file.
-          (supported file types: http://www.mega-nerd.com/libsndfile/#Features)
-        step_size: how many samples to move the window by for each analysis.
-        window_size: how many samples to analyze each time.
-        q (float): quality factor for the notch filter.
-        freq (int|float): fundamental frequency of the signal. All other
-            frequencies are noise. If None, will be calculated with FFT.
-    Returns:
-        channel_results (list): max THD+N value for each channel's signal.
-            List index corresponds to channel index.
-    """
-    audio_file = soundfile.SoundFile(filename)
-    channel_results = []
-    if audio_file.channels == 1:
-        channel_results.append(
-            max_THDN(signal=audio_file.read(),
-                     rate=audio_file.samplerate,
-                     step_size=step_size,
-                     window_size=window_size,
-                     q=q,
-                     freq=freq))
-    else:
-        for ch_no, channel in enumerate(audio_file.read().transpose()):
-            channel_results.append(
-                max_THDN(signal=channel,
-                         rate=audio_file.samplerate,
-                         step_size=step_size,
-                         window_size=window_size,
-                         q=q,
-                         freq=freq))
-    return channel_results
-
-
-def get_file_anomaly_durations(filename,
-                               freq=None,
-                               block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                               threshold=PATTERN_MATCHING_THRESHOLD,
-                               tolerance=ANOMALY_GROUPING_TOLERANCE):
-    """Get durations of anomalies for each channel of audio file.
-
-    Args:
-        filename (str): path to the audio file.
-          (supported file types: http://www.mega-nerd.com/libsndfile/#Features)
-        freq (int|float): fundamental frequency of the signal. All other
-            frequencies are noise. If None, will be calculated with FFT.
-        block_size (int): The block size in samples to detect anomaly.
-        threshold (float): The threshold of correlation index to be judged as
-            matched.
-        tolerance (float): The number of samples beyond block_size / 2 by which
-            the sample distance between two anomaly time values may differ and
-            still be grouped into the same anomaly.
-    Returns:
-        channel_results (list): anomaly durations for each channel's signal.
-            List index corresponds to channel index.
-    """
-    audio_file = soundfile.SoundFile(filename)
-    signal = audio_file.read()
-    freq = freq or fundamental_freq(signal, audio_file.samplerate)
-    channel_results = []
-    if audio_file.channels == 1:
-        channel_results.append(
-            get_anomaly_durations(signal=signal,
-                                  rate=audio_file.samplerate,
-                                  freq=freq,
-                                  block_size=block_size,
-                                  threshold=threshold,
-                                  tolerance=tolerance))
-    else:
-        for ch_no, channel in enumerate(signal.transpose()):
-            channel_results.append(
-                get_anomaly_durations(signal=channel,
-                                      rate=audio_file.samplerate,
-                                      freq=freq,
-                                      block_size=block_size,
-                                      threshold=threshold,
-                                      tolerance=tolerance))
-    return channel_results
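
For reference, the file-level entry points removed above can be exercised on a
recorded capture; a hedged sketch of a typical call (the capture file name and
parameter values below are illustrative):

    import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis

    # One list of (start_sec, end_sec) anomaly tuples per channel.
    durations = audio_analysis.get_file_anomaly_durations("capture.wav", freq=1000)
    # One THD+N ratio per channel.
    thdn = audio_analysis.get_file_THDN("capture.wav", q=10, freq=1000)
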
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_data.py b/src/antlion/test_utils/audio_analysis_lib/audio_data.py
deleted file mode 100644
index 4b74730..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_data.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides abstraction of audio data."""
-
-import numpy
-"""The dict containing information on how to parse sample from raw data.
-
-Keys: The sample format as in aplay command.
-Values: A dict containing:
-    message: Human-readable sample format.
-    dtype_str: Data type used in numpy dtype.  Check
-               https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html
-               for supported data type.
-    size_bytes: Number of bytes for one sample.
-"""
-SAMPLE_FORMATS = dict(
-    S32_LE=dict(message='Signed 32-bit integer, little-endian',
-                dtype_str='<i',
-                size_bytes=4),
-    S16_LE=dict(message='Signed 16-bit integer, little-endian',
-                dtype_str='<i',
-                size_bytes=2))
-
-
-def get_maximum_value_from_sample_format(sample_format):
-    """Gets the maximum value from sample format.
-
-    Args:
-        sample_format: A key in SAMPLE_FORMAT.
-
-    Returns:
-        The maximum value the sample can hold + 1.
-
-    """
-    size_bits = SAMPLE_FORMATS[sample_format]['size_bytes'] * 8
-    return 1 << (size_bits - 1)
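
# Illustrative only (not from the original file): for 16-bit samples this is
# the value used to normalize PCM data to the [-1, 1) range.
#
#     assert get_maximum_value_from_sample_format("S16_LE") == 1 << 15  # 32768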
-
-
-class AudioRawDataError(Exception):
-    """Error in AudioRawData."""
-
-
-class AudioRawData(object):
-    """The abstraction of audio raw data.
-
-    @property channel: The number of channels.
-    @property channel_data: A list of lists containing samples in each channel.
-                            E.g., The third sample in the second channel is
-                            channel_data[1][2].
-    @property sample_format: The sample format which should be one of the keys
-                             in audio_data.SAMPLE_FORMATS.
-    """
-
-    def __init__(self, binary, channel, sample_format):
-        """Initializes an AudioRawData.
-
-        Args:
-            binary: A string containing binary data. If binary is not None,
-                       the samples in binary will be parsed and filled into
-                       channel_data.
-            channel: The number of channels.
-            sample_format: One of the keys in audio_data.SAMPLE_FORMATS.
-        """
-        self.channel = channel
-        self.channel_data = [[] for _ in range(self.channel)]
-        self.sample_format = sample_format
-        if binary:
-            self.read_binary(binary)
-
-    def read_binary(self, binary):
-        """Reads samples from binary and fills channel_data.
-
-        Reads samples of fixed width from binary string into a numpy array
-        and shapes them into each channel.
-
-        Args:
-            binary: A string containing binary data.
-        """
-        sample_format_dict = SAMPLE_FORMATS[self.sample_format]
-
-        # The data type used in numpy fromstring function. For example,
-        # <i4 for 32-bit signed int.
-        np_dtype = '%s%d' % (sample_format_dict['dtype_str'],
-                             sample_format_dict['size_bytes'])
-
-        # Reads data from a string into 1-D array.
-        np_array = numpy.fromstring(binary, dtype=np_dtype)
-
-        n_frames = len(np_array) / self.channel
-        # Reshape np_array into an array of shape (n_frames, channel).
-        np_array = np_array.reshape(int(n_frames), self.channel)
-        # Transpose np_array so its shape becomes (channel, n_frames).
-        self.channel_data = np_array.transpose()
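
The interleaved-sample parsing done by read_binary above can be reproduced
with numpy alone; a small sketch, assuming S16_LE stereo data (note that
numpy.frombuffer supersedes the deprecated numpy.fromstring used in the
removed code):

    import numpy

    # Two interleaved stereo frames: L0, R0, L1, R1
    binary = numpy.array([1, 2, 3, 4], dtype="<i2").tobytes()
    samples = numpy.frombuffer(binary, dtype="<i2")
    channel_data = samples.reshape(-1, 2).transpose()  # [[1, 3], [2, 4]]
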
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_quality_measurement.py b/src/antlion/test_utils/audio_analysis_lib/audio_quality_measurement.py
deleted file mode 100644
index c347636..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_quality_measurement.py
+++ /dev/null
@@ -1,927 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides utilities to detect some artifacts and measure the
-    quality of audio."""
-
-import logging
-import math
-import numpy
-
-import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis
-
-# The input signal should be one sine wave with fixed frequency which
-# can have silence before and/or after sine wave.
-# For example:
-#   silence      sine wave      silence
-#  -----------|VVVVVVVVVVVVV|-----------
-#     (a)           (b)           (c)
-# This module detects these artifacts:
-#   1. Detect noise in (a) and (c).
-#   2. Detect delay in (b).
-#   3. Detect burst in (b).
-# Assume the transitions between (a)(b) and (b)(c) are smooth and
-# amplitude increases/decreases linearly.
-# This module will detect artifacts in the sine wave.
-# This module also estimates the equivalent noise level using the Teager operator.
-# This module also detects volume changes in the sine wave. However, volume
-# changes may be affected by delay or burst.
-# Some artifacts may cause each other.
-
-# In this module, amplitude and frequency are derived from Hilbert transform.
-# Both amplitude and frequency are a function of time.
-
-# To detect each artifact, each point will be compared with
-# average amplitude of its block. The block size will be 1.5 ms.
-# Using average amplitude can mitigate the error caused by
-# Hilbert transform and noise.
-# In some case, for more accuracy, the block size may be modified
-# to other values.
-DEFAULT_BLOCK_SIZE_SECS = 0.0015
-
-# If the difference between the average frequency of a block and the
-# dominant frequency of the full signal is less than 0.5 times the
-# dominant frequency, the block is considered to be within the
-# sine wave. In most cases, if there is no sine wave (only noise),
-# the average frequency will be much greater than 5 times the
-# dominant frequency.
-# Also, for a delay during playback, the frequency will be about 0
-# in the ideal case, or much greater than 5 times the dominant
-# frequency if it is noisy.
-DEFAULT_FREQUENCY_ERROR = 0.5
-
-# If the amplitude of a sample is less than 0.6 times the average
-# amplitude of its left/right block, it will be considered
-# a delay during playback.
-DEFAULT_DELAY_AMPLITUDE_THRESHOLD = 0.6
-
-# If the average amplitude of a block before or after playback
-# is more than 0.5 times the average amplitude of the wave,
-# it will be considered a noise artifact.
-DEFAULT_NOISE_AMPLITUDE_THRESHOLD = 0.5
-
-# In the sine wave, if the amplitude is more than 1.4 times that of
-# its left side and its right side, it will be considered
-# a burst.
-DEFAULT_BURST_AMPLITUDE_THRESHOLD = 1.4
-
-# When detecting burst, if the amplitude is lower than 0.5 times
-# average amplitude, we ignore it.
-DEFAULT_BURST_TOO_SMALL = 0.5
-
-# For a signal which is the combination of sine wave with fixed frequency f and
-# amplitude 1 and standard noise with amplitude k, the average teager value is
-# nearly linear to the noise level k.
-# Given frequency f, we simulate a sine wave with default noise level and
-# calculate its average teager value. Then, we can estimate the equivalent
-# noise level of input signal by the average teager value of input signal.
-DEFAULT_STANDARD_NOISE = 0.005
-
-# For delays, bursts, and volume increases/decreases, if two events of the
-# same kind happen within DEFAULT_SAME_EVENT_SECS seconds, we consider
-# them to be the same event.
-DEFAULT_SAME_EVENT_SECS = 0.001
-
-# When detecting increasing/decreasing volume of signal, if the amplitude
-# is lower than 0.1 times average amplitude, we ignore it.
-DEFAULT_VOLUME_CHANGE_TOO_SMALL = 0.1
-
-# If average amplitude of right block is less/more than average
-# amplitude of left block times DEFAULT_VOLUME_CHANGE_AMPLITUDE, it will be
-# considered as decreasing/increasing on volume.
-DEFAULT_VOLUME_CHANGE_AMPLITUDE = 0.1
-
-# If the increasing/decreasing volume event is too close to the start or the end
-# of sine wave, we consider its volume change as part of rising/falling phase in
-# the start/end.
-NEAR_START_OR_END_SECS = 0.01
-
-# After applying Hilbert transform, the resulting amplitude and frequency may be
-# extremely large in the start and/or the end part. Thus, we will append zeros
-# before and after the whole wave for 0.1 secs.
-APPEND_ZEROS_SECS = 0.1
-
-# If the noise event is too close to the start or the end of the data, we
-# consider its noise as part of artifacts caused by edge effect of Hilbert
-# transform.
-# For example, originally, the data duration is 10 seconds.
-# We append 0.1 seconds of zeros in the beginning and the end of the data, so
-# the data becomes 10.2 seconds long.
-# Then, we apply Hilbert transform to 10.2 seconds of data.
-# Near 0.1 seconds and 10.1 seconds, there will be edge effects of the Hilbert
-# transform. We do not want these to be treated as noise.
-# If NEAR_DATA_START_OR_END_SECS is set to 0.01, then the noise happened
-# at [0, 0.11] and [10.09, 10.1] will be ignored.
-NEAR_DATA_START_OR_END_SECS = 0.01
-
-# If the noise event is too close to the start or the end of the sine wave in
-# the data, we consider its noise as part of artifacts caused by edge effect of
-# Hilbert transform.
-# A |-------------|vvvvvvvvvvvvvvvvvvvvvvv|-------------|
-# B |ooooooooo| d |                       | d |ooooooooo|
-#
-# A is full signal. It contains a sine wave and silence before and after sine
-# wave.
-# In B, |oooo| shows the parts that we are going to check for noise before/after
-# sine wave. | d | is determined by NEAR_SINE_START_OR_END_SECS.
-NEAR_SINE_START_OR_END_SECS = 0.01
-
-
-class SineWaveNotFound(Exception):
-    """Error when there's no sine wave found in the signal"""
-
-
-def hilbert(x):
-    """Hilbert transform copied from scipy.
-
-    More information can be found here:
-    http://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.hilbert.html
-
-    Args:
-        x: Real signal data to transform.
-
-    Returns:
-        Analytic signal of x, we can further extract amplitude and
-              frequency from it.
-
-    """
-    x = numpy.asarray(x)
-    if numpy.iscomplexobj(x):
-        raise ValueError("x must be real.")
-    axis = -1
-    N = x.shape[axis]
-    if N <= 0:
-        raise ValueError("N must be positive.")
-
-    Xf = numpy.fft.fft(x, N, axis=axis)
-    h = numpy.zeros(N)
-    if N % 2 == 0:
-        h[0] = h[N // 2] = 1
-        h[1:N // 2] = 2
-    else:
-        h[0] = 1
-        h[1:(N + 1) // 2] = 2
-
-    if len(x.shape) > 1:
-        # Broadcast h along all axes other than the transform axis.
-        ind = [numpy.newaxis] * x.ndim
-        ind[axis] = slice(None)
-        h = h[tuple(ind)]
-    x = numpy.fft.ifft(Xf * h, axis=axis)
-    return x
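
# Cross-check, illustrative only (not from the original file): for 1-D real
# input this matches scipy's implementation.
#
#     from scipy.signal import hilbert as scipy_hilbert
#     x = numpy.sin(numpy.linspace(0, 10, 256))
#     assert numpy.allclose(hilbert(x), scipy_hilbert(x))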
-
-
-def noised_sine_wave(frequency, rate, noise_level):
-    """Generates a sine wave of 2 second with specified noise level.
-
-    Args:
-        frequency: Frequency of sine wave.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        noise_level: Required noise level.
-
-    Returns:
-        A sine wave with specified noise level.
-
-    """
-    wave = []
-    for index in range(0, rate * 2):
-        sample = 2.0 * math.pi * frequency * float(index) / float(rate)
-        sine_wave = math.sin(sample)
-        noise = noise_level * numpy.random.standard_normal()
-        wave.append(sine_wave + noise)
-    return wave
-
-
-def average_teager_value(wave, amplitude):
-    """Computes the normalized average teager value.
-
-    After averaging the teager value, we normalize it by dividing by the
-    square of the amplitude.
-
-    Args:
-        wave: Wave to apply teager operator.
-        amplitude: Average amplitude of given wave.
-
-    Returns:
-        Average teager value.
-
-    """
-    teager_value, length = 0, len(wave)
-    for i in range(1, length - 1):
-        ith_teager_value = abs(wave[i] * wave[i] - wave[i - 1] * wave[i + 1])
-        ith_teager_value *= max(1, abs(wave[i]))
-        teager_value += ith_teager_value
-    teager_value = (float(teager_value) / length) / (amplitude**2)
-    return teager_value
-
-
-def noise_level(amplitude, frequency, rate, teager_value_of_input):
-    """Computes the noise level compared with standard_noise.
-
-    For a signal which is the combination of sine wave with fixed frequency f
-    and amplitude 1 and standard noise with amplitude k, the average teager
-    value is nearly linear to the noise level k.
-    Thus, we can compute the average teager value of a sine wave with
-    standard_noise. Then, we can estimate the noise level of given input.
-
-    Args:
-        amplitude: Amplitude of input audio.
-        frequency: Dominant frequency of input audio.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        teager_value_of_input: Average teager value of input audio.
-
-    Returns:
-        A float value denoting the noise level relative to the amplitude of
-            the audio. For example, 0.02 denotes that the wave carries noise
-            drawn from a standard (normal) distribution whose standard
-            deviation is 0.02 times the amplitude of the wave.
-
-    """
-    standard_noise = DEFAULT_STANDARD_NOISE
-
-    # Generates the standard sine wave with standard_noise level of noise.
-    standard_wave = noised_sine_wave(frequency, rate, standard_noise)
-
-    # Calculates the average teager value.
-    teager_value_of_std_wave = average_teager_value(standard_wave, amplitude)
-
-    return (teager_value_of_input / teager_value_of_std_wave) * standard_noise
-
-
-def error(f1, f2):
-    """Calculates the relative error between f1 and f2.
-
-    Args:
-        f1: Exact value.
-        f2: Test value.
-
-    Returns:
-        Relative error between f1 and f2.
-
-    """
-    return abs(float(f1) - float(f2)) / float(f1)
-
-
-def hilbert_analysis(signal, rate, block_size):
-    """Finds amplitude and frequency of each time of signal by Hilbert transform.
-
-    Args:
-        signal: The wave to analyze.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        block_size: The size of block to transform.
-
-    Returns:
-        A tuple of list: (amplitude, frequency) composed of amplitude and
-            frequency of each time.
-
-    """
-    # To apply Hilbert transform, the wave will be transformed
-    # segment by segment. For each segment, its size will be
-    # block_size and we will only take middle part of it.
-    # Thus, each segment looks like: |-----|=====|=====|-----|.
-    # "=...=" part will be taken while "-...-" part will be ignored.
-    #
-    # The whole size of taken part will be half of block_size
-    # which will be hilbert_block.
-    # The size of each ignored part will be half of hilbert_block
-    # which will be half_hilbert_block.
-    hilbert_block = block_size // 2
-    half_hilbert_block = hilbert_block // 2
-    # As mentioned above, for each block, we will only take middle
-    # part of it. Thus, the whole transformation will be completed as:
-    # |=====|=====|-----|           |-----|=====|=====|-----|
-    #       |-----|=====|=====|-----|           |-----|=====|=====|
-    #                   |-----|=====|=====|-----|
-    # Specially, beginning and ending part may not have ignored part.
-    length = len(signal)
-    result = []
-    for left_border in range(0, length, hilbert_block):
-        right_border = min(length, left_border + hilbert_block)
-        temp_left_border = max(0, left_border - half_hilbert_block)
-        temp_right_border = min(length, right_border + half_hilbert_block)
-        temp = hilbert(signal[temp_left_border:temp_right_border])
-        for index in range(left_border, right_border):
-            result.append(temp[index - temp_left_border])
-    result = numpy.asarray(result)
-    amplitude = numpy.abs(result)
-    phase = numpy.unwrap(numpy.angle(result))
-    frequency = numpy.diff(phase) / (2.0 * numpy.pi) * rate
-    # numpy.diff makes frequency one sample shorter than amplitude, so repeat
-    # the last value to restore equal length.
-    frequency = numpy.append(frequency, frequency[len(frequency) - 1])
-    return (amplitude, frequency)
-
-
-def find_block_average_value(arr, side_block_size, block_size):
-    """For each index, finds average value of its block, left block, right block.
-
-    It will find average value for each index in the range.
-
-    For each index, the range of its block is
-        [max(0, index - block_size / 2), min(length - 1, index + block_size / 2)]
-    For each index, the range of its left block is
-        [max(0, index - side_block_size), index]
-    For each index, the range of its right block is
-        [index, min(length - 1, index + side_block_size)]
-
-    Args:
-        arr: The array to be computed.
-        side_block_size: the size of the left_block and right_block.
-        block_size: the size of the block.
-
-    Returns:
-        A tuple of lists: (left_block_average_array,
-                                 right_block_average_array,
-                                 block_average_array)
-    """
-    length = len(arr)
-    left_border, right_border = 0, 1
-    left_block_sum = arr[0]
-    right_block_sum = arr[0]
-    left_average_array = numpy.zeros(length)
-    right_average_array = numpy.zeros(length)
-    block_average_array = numpy.zeros(length)
-    for index in range(0, length):
-        while left_border < index - side_block_size:
-            left_block_sum -= arr[left_border]
-            left_border += 1
-        while right_border < min(length, index + side_block_size):
-            right_block_sum += arr[right_border]
-            right_border += 1
-
-        left_average_value = float(left_block_sum) / (index - left_border + 1)
-        right_average_value = float(right_block_sum) / (right_border - index)
-        left_average_array[index] = left_average_value
-        right_average_array[index] = right_average_value
-
-        if index + 1 < length:
-            left_block_sum += arr[index + 1]
-        right_block_sum -= arr[index]
-    left_border, right_border = 0, 1
-    block_sum = 0
-    for index in range(0, length):
-        while left_border < index - block_size / 2:
-            block_sum -= arr[left_border]
-            left_border += 1
-        while right_border < min(length, index + block_size / 2):
-            block_sum += arr[right_border]
-            right_border += 1
-
-        average_value = float(block_sum) / (right_border - left_border)
-        block_average_array[index] = average_value
-    return (left_average_array, right_average_array, block_average_array)
-
-
-def find_start_end_index(dominant_frequency, block_frequency_delta, block_size,
-                         frequency_error_threshold):
-    """Finds start and end index of sine wave.
-
-    For each block with size of block_size, we check that whether its frequency
-    is close enough to the dominant_frequency. If yes, we will consider this
-    block to be within the sine wave.
-    Then, it will return the start and end index of sine wave indicating that
-    sine wave is between [start_index, end_index)
-    It's okay if the whole signal only contains sine wave.
-
-    Args:
-        dominant_frequency: Dominant frequency of signal.
-        block_frequency_delta: Average absolute difference between dominant
-                                  frequency and frequency of each block. For
-                                  each index, its block is
-                                  [max(0, index - block_size / 2),
-                                   min(length - 1, index + block_size / 2)]
-        block_size: Block size in samples.
-        frequency_error_threshold: Maximum allowed relative error between a
-                                      block's frequency and the dominant
-                                      frequency for the block to be considered
-                                      part of the sine wave. Ref to
-                                      DEFAULT_FREQUENCY_ERROR.
-
-    Returns:
-        A tuple composed of (start_index, end_index)
-
-    """
-    length = len(block_frequency_delta)
-
-    # Finds the start/end time index of playing based on dominant frequency
-    start_index, end_index = length - 1, 0
-    for index in range(0, length):
-        left_border = max(0, index - block_size / 2)
-        right_border = min(length - 1, index + block_size / 2)
-        frequency_error = block_frequency_delta[index] / dominant_frequency
-        if frequency_error < frequency_error_threshold:
-            start_index = min(start_index, left_border)
-            end_index = max(end_index, right_border + 1)
-    return (start_index, end_index)
-
-
-def noise_detection(start_index, end_index, block_amplitude, average_amplitude,
-                    rate, noise_amplitude_threshold):
-    """Detects noise before/after sine wave.
-
-    If average amplitude of some sample's block before start of wave or after
-    end of wave is more than average_amplitude times noise_amplitude_threshold,
-    it will be considered as a noise.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        block_amplitude: An array for average amplitude of each block, where
-                            amplitude is computed from Hilbert transform.
-        average_amplitude: Average amplitude of sine wave.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        noise_amplitude_threshold: If the average amplitude of a block is
-                        higher than average amplitude of the wave times
-                        noise_amplitude_threshold, it will be considered as
-                        noise before/after playback.
-
-    Returns:
-        A tuple of lists indicating the time that noise happens:
-            (noise_before_playing, noise_after_playing).
-
-    """
-    length = len(block_amplitude)
-    amplitude_threshold = average_amplitude * noise_amplitude_threshold
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-
-    # Detects noise before playing.
-    noise_time_point = []
-    last_noise_end_time_point = []
-    previous_noise_index = None
-    times = 0
-    for index in range(0, length):
-        # Ignore noise too close to the beginning or the end of sine wave.
-        # Check the docstring of NEAR_SINE_START_OR_END_SECS.
-        if ((start_index - rate * NEAR_SINE_START_OR_END_SECS) <= index
-                and (index < end_index + rate * NEAR_SINE_START_OR_END_SECS)):
-            continue
-
-        # Ignore noise too close to the beginning or the end of original data.
-        # Check the docstring of NEAR_DATA_START_OR_END_SECS.
-        if (float(index) / rate <=
-                NEAR_DATA_START_OR_END_SECS + APPEND_ZEROS_SECS):
-            continue
-        if (float(length - index) / rate <=
-                NEAR_DATA_START_OR_END_SECS + APPEND_ZEROS_SECS):
-            continue
-        if block_amplitude[index] > amplitude_threshold:
-            same_event = False
-            if previous_noise_index:
-                same_event = (index -
-                              previous_noise_index) < same_event_samples
-            if not same_event:
-                index_start_sec = float(index) / rate - APPEND_ZEROS_SECS
-                index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-                noise_time_point.append(index_start_sec)
-                last_noise_end_time_point.append(index_end_sec)
-                times += 1
-            index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-            last_noise_end_time_point[times - 1] = index_end_sec
-            previous_noise_index = index
-
-    noise_before_playing, noise_after_playing = [], []
-    for i in range(times):
-        duration = last_noise_end_time_point[i] - noise_time_point[i]
-        if noise_time_point[i] < float(start_index) / rate - APPEND_ZEROS_SECS:
-            noise_before_playing.append((noise_time_point[i], duration))
-        else:
-            noise_after_playing.append((noise_time_point[i], duration))
-
-    return (noise_before_playing, noise_after_playing)
-
-
-def delay_detection(start_index, end_index, block_amplitude, average_amplitude,
-                    dominant_frequency, rate, left_block_amplitude,
-                    right_block_amplitude, block_frequency_delta,
-                    delay_amplitude_threshold, frequency_error_threshold):
-    """Detects delay during playing.
-
-    For each sample, we will check whether the average amplitude of its block
-    is less than average amplitude of its left block and its right block times
-    delay_amplitude_threshold. Also, we will check whether the frequency of
-    its block is far from the dominant frequency.
-    If at least one constraint is fulfilled, it will be considered a delay.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        block_amplitude: An array for average amplitude of each block, where
-                            amplitude is computed from Hilbert transform.
-        average_amplitude: Average amplitude of sine wave.
-        dominant_frequency: Dominant frequency of signal.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        left_block_amplitude: Average amplitude of left block of each index.
-                                Ref to find_block_average_value function.
-        right_block_amplitude: Average amplitude of right block of each index.
-                                Ref to find_block_average_value function.
-        block_frequency_delta: Average absolute difference frequency to
-                                dominant frequency of block of each index.
-                                Ref to find_block_average_value function.
-        delay_amplitude_threshold: If the average amplitude of a block is
-                        lower than average amplitude of the wave times
-                        delay_amplitude_threshold, it will be considered
-                        as delay.
-        frequency_error_threshold: Ref to DEFAULT_FREQUENCY_ERROR
-
-    Returns:
-        List of delay occurrence:
-                [(time_1, duration_1), (time_2, duration_2), ...],
-              where time and duration are in seconds.
-
-    """
-    delay_time_points = []
-    last_delay_end_time_points = []
-    previous_delay_index = None
-    times = 0
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-    start_time = float(start_index) / rate - APPEND_ZEROS_SECS
-    end_time = float(end_index) / rate - APPEND_ZEROS_SECS
-    for index in range(int(start_index), int(end_index)):
-        if block_amplitude[
-                index] > average_amplitude * delay_amplitude_threshold:
-            continue
-        now_time = float(index) / rate - APPEND_ZEROS_SECS
-        if abs(now_time - start_time) < NEAR_START_OR_END_SECS:
-            continue
-        if abs(now_time - end_time) < NEAR_START_OR_END_SECS:
-            continue
-        # If amplitude less than its left/right side and small enough,
-        # it will be considered as a delay.
-        amp_threshold = average_amplitude * delay_amplitude_threshold
-        left_threshold = delay_amplitude_threshold * left_block_amplitude[index]
-        amp_threshold = min(amp_threshold, left_threshold)
-        right_threshold = delay_amplitude_threshold * right_block_amplitude[
-            index]
-        amp_threshold = min(amp_threshold, right_threshold)
-
-        frequency_error = block_frequency_delta[index] / dominant_frequency
-
-        amplitude_too_small = block_amplitude[index] < amp_threshold
-        frequency_not_match = frequency_error > frequency_error_threshold
-
-        if amplitude_too_small or frequency_not_match:
-            same_event = False
-            if previous_delay_index:
-                same_event = (index -
-                              previous_delay_index) < same_event_samples
-            if not same_event:
-                index_start_sec = float(index) / rate - APPEND_ZEROS_SECS
-                index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-                delay_time_points.append(index_start_sec)
-                last_delay_end_time_points.append(index_end_sec)
-                times += 1
-            previous_delay_index = index
-            index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-            last_delay_end_time_points[times - 1] = index_end_sec
-
-    delay_list = []
-    for i in range(len(delay_time_points)):
-        duration = last_delay_end_time_points[i] - delay_time_points[i]
-        delay_list.append((delay_time_points[i], duration))
-    return delay_list
-
-
-def burst_detection(start_index, end_index, block_amplitude, average_amplitude,
-                    dominant_frequency, rate, left_block_amplitude,
-                    right_block_amplitude, block_frequency_delta,
-                    burst_amplitude_threshold, frequency_error_threshold):
-    """Detects burst during playing.
-
-    For each sample, we will check whether the average amplitude of its block is
-    more than average amplitude of its left block and its right block times
-    burst_amplitude_threshold. Also, we will check whether the frequency of
-    its block is incompatible with the dominant frequency.
-    If at least one constraint is fulfilled, it will be considered a burst.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        block_amplitude: An array for average amplitude of each block, where
-                            amplitude is computed from Hilbert transform.
-        average_amplitude: Average amplitude of sine wave.
-        dominant_frequency: Dominant frequency of signal.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        left_block_amplitude: Average amplitude of left block of each index.
-                                Ref to find_block_average_value function.
-        right_block_amplitude: Average amplitude of right block of each index.
-                                Ref to find_block_average_value function.
-        block_frequency_delta: Average absolute difference frequency to
-                                dominant frequency of block of each index.
-        burst_amplitude_threshold: If the amplitude is higher than the average
-                            amplitude of its left block and its right block
-                            times burst_amplitude_threshold, it will be
-                            considered as a burst.
-        frequency_error_threshold: Ref to DEFAULT_FREQUENCY_ERROR
-
-    Returns:
-        List of burst occurrences: [time_1, time_2, ...],
-              where time is in seconds.
-
-    """
-    burst_time_points = []
-    previous_burst_index = None
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-    for index in range(int(start_index), int(end_index)):
-        # If amplitude higher than its left/right side and large enough,
-        # it will be considered as a burst.
-        if block_amplitude[
-                index] <= average_amplitude * DEFAULT_BURST_TOO_SMALL:
-            continue
-        if abs(index - start_index) < rate * NEAR_START_OR_END_SECS:
-            continue
-        if abs(index - end_index) < rate * NEAR_START_OR_END_SECS:
-            continue
-        amp_threshold = average_amplitude * DEFAULT_BURST_TOO_SMALL
-        left_threshold = burst_amplitude_threshold * left_block_amplitude[index]
-        amp_threshold = max(amp_threshold, left_threshold)
-        right_threshold = burst_amplitude_threshold * right_block_amplitude[
-            index]
-        amp_threshold = max(amp_threshold, right_threshold)
-
-        frequency_error = block_frequency_delta[index] / dominant_frequency
-
-        amplitude_too_large = block_amplitude[index] > amp_threshold
-        frequency_not_match = frequency_error > frequency_error_threshold
-
-        if amplitude_too_large or frequency_not_match:
-            same_event = False
-            if previous_burst_index:
-                same_event = index - previous_burst_index < same_event_samples
-            if not same_event:
-                burst_time_points.append(
-                    float(index) / rate - APPEND_ZEROS_SECS)
-            previous_burst_index = index
-
-    return burst_time_points
-
-
-def changing_volume_detection(start_index, end_index, average_amplitude, rate,
-                              left_block_amplitude, right_block_amplitude,
-                              volume_changing_amplitude_threshold):
-    """Finds volume changing during playback.
-
-    For each index, we compare the average amplitude of its left block with
-    that of its right block. If the average amplitude of the right block is
-    more than that of the left block times (1 + DEFAULT_VOLUME_CHANGE_AMPLITUDE),
-    it is considered an increase in volume. If it is less than that of the
-    left block times (1 - DEFAULT_VOLUME_CHANGE_AMPLITUDE), it is considered
-    a decrease in volume.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        average_amplitude: Average amplitude of sine wave.
-        rate: Sampling rate in samples per second, e.g. 44100 or 48000.
-        left_block_amplitude: Average amplitude of left block of each index.
-                                Ref to find_block_average_value function.
-        right_block_amplitude: Average amplitude of right block of each index.
-                                Ref to find_block_average_value function.
-        volume_changing_amplitude_threshold: If the average amplitude of right
-                                                block is higher or lower than
-                                                that of left one times this
-                                                value, it will be considered as
-                                                a volume change.
-                                                Also refer to
-                                                DEFAULT_VOLUME_CHANGE_AMPLITUDE
-
-    Returns:
-        List of (time, flag) tuples where flag is +1 for increasing volume and
-            -1 for decreasing volume, and time is in seconds.
-
-    """
-    length = len(left_block_amplitude)
-
-    # Detects rising and/or falling volume.
-    previous_rising_index, previous_falling_index = None, None
-    changing_time = []
-    changing_events = []
-    amplitude_threshold = average_amplitude * DEFAULT_VOLUME_CHANGE_TOO_SMALL
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-    for index in range(int(start_index), int(end_index)):
-        # Skips if amplitude is too small.
-        if left_block_amplitude[index] < amplitude_threshold:
-            continue
-        if right_block_amplitude[index] < amplitude_threshold:
-            continue
-        # Skips if changing is from start or end time
-        if float(abs(start_index - index)) / rate < NEAR_START_OR_END_SECS:
-            continue
-        if float(abs(end_index - index)) / rate < NEAR_START_OR_END_SECS:
-            continue
-
-        delta_margin = volume_changing_amplitude_threshold
-        if left_block_amplitude[index] > 0:
-            delta_margin *= left_block_amplitude[index]
-
-        increasing_threshold = left_block_amplitude[index] + delta_margin
-        decreasing_threshold = left_block_amplitude[index] - delta_margin
-
-        if right_block_amplitude[index] > increasing_threshold:
-            same_event = False
-            if previous_rising_index:
-                same_event = index - previous_rising_index < same_event_samples
-            if not same_event:
-                changing_time.append(float(index) / rate - APPEND_ZEROS_SECS)
-                changing_events.append(+1)
-            previous_rising_index = index
-        if right_block_amplitude[index] < decreasing_threshold:
-            same_event = False
-            if previous_falling_index:
-                same_event = index - previous_falling_index < same_event_samples
-            if not same_event:
-                changing_time.append(float(index) / rate - APPEND_ZEROS_SECS)
-                changing_events.append(-1)
-            previous_falling_index = index
-
-    # Combines consecutive increasing/decreasing event.
-    combined_changing_events, prev = [], 0
-    for i in range(len(changing_events)):
-        if changing_events[i] == prev:
-            continue
-        combined_changing_events.append((changing_time[i], changing_events[i]))
-        prev = changing_events[i]
-    return combined_changing_events
-
-
-def quality_measurement(
-        signal,
-        rate,
-        dominant_frequency=None,
-        block_size_secs=DEFAULT_BLOCK_SIZE_SECS,
-        frequency_error_threshold=DEFAULT_FREQUENCY_ERROR,
-        delay_amplitude_threshold=DEFAULT_DELAY_AMPLITUDE_THRESHOLD,
-        noise_amplitude_threshold=DEFAULT_NOISE_AMPLITUDE_THRESHOLD,
-        burst_amplitude_threshold=DEFAULT_BURST_AMPLITUDE_THRESHOLD,
-        volume_changing_amplitude_threshold=DEFAULT_VOLUME_CHANGE_AMPLITUDE):
-    """Detects several artifacts and estimates the noise level.
-
-    This method detects artifact before playing, after playing, and delay
-    during playing. Also, it estimates the noise level of the signal.
-    To avoid the influence of noise, it calculates amplitude and frequency
-    block by block.
-
-    Args:
-        signal: A list of numbers for one-channel PCM data. The data should
-                   be normalized to [-1,1].
-        rate: Sampling rate in samples per second, e.g. 44100 or 48000.
-        dominant_frequency: Dominant frequency of signal. Set None to
-                               recalculate the frequency in this function.
-        block_size_secs: Block size in seconds. The measurement will be done
-                            block-by-block using average amplitude and frequency
-                            in each block to avoid noise.
-        frequency_error_threshold: Ref to DEFAULT_FREQUENCY_ERROR.
-        delay_amplitude_threshold: If the average amplitude of a block is
-                                      lower than average amplitude of the wave
-                                      times delay_amplitude_threshold, it will
-                                      be considered as delay.
-                                      Also refer to delay_detection and
-                                      DEFAULT_DELAY_AMPLITUDE_THRESHOLD.
-        noise_amplitude_threshold: If the average amplitude of a block is
-                                      higher than average amplitude of the wave
-                                      times noise_amplitude_threshold, it will
-                                      be considered as noise before/after
-                                      playback.
-                                      Also refer to noise_detection and
-                                      DEFAULT_NOISE_AMPLITUDE_THRESHOLD.
-        burst_amplitude_threshold: If the average amplitude of a block is
-                                      higher than the average amplitude of its
-                                      left block and its right block times
-                                      burst_amplitude_threshold, it will be
-                                      considered a burst.
-                                      Also refer to burst_detection and
-                                      DEFAULT_BURST_AMPLITUDE_THRESHOLD.
-        volume_changing_amplitude_threshold: If the average amplitude of right
-                                                block is higher or lower than
-                                                that of left one times this
-                                                value, it will be considered as
-                                                a volume change.
-                                                Also refer to
-                                                changing_volume_detection and
-                                                DEFAULT_VOLUME_CHANGE_AMPLITUDE
-
-    Returns:
-        A dictionary of detection/estimation results:
-              {'artifacts':
-                {'noise_before_playback':
-                    [(time_1, duration_1), (time_2, duration_2), ...],
-                 'noise_after_playback':
-                    [(time_1, duration_1), (time_2, duration_2), ...],
-                 'delay_during_playback':
-                    [(time_1, duration_1), (time_2, duration_2), ...],
-                 'burst_during_playback':
-                    [time_1, time_2, ...]
-                },
-               'volume_changes':
-                 [(time_1, flag_1), (time_2, flag_2), ...],
-               'equivalent_noise_level': level
-              }
-              where durations and time points are in seconds.
-              equivalent_noise_level is the quotient of noise and wave
-              amplitude; refer to DEFAULT_STANDARD_NOISE. volume_changes is a
-              list of tuples containing time stamps and increasing/decreasing
-              flags for volume change events.
-
-    """
-    # Calculates the block size, from seconds to samples.
-    block_size = int(block_size_secs * rate)
-
-    signal = numpy.concatenate(
-        (numpy.zeros(int(rate * APPEND_ZEROS_SECS)), signal,
-         numpy.zeros(int(rate * APPEND_ZEROS_SECS))))
-    signal = numpy.array(signal, dtype=float)
-    length = len(signal)
-
-    # Calculates the amplitude and frequency.
-    amplitude, frequency = hilbert_analysis(signal, rate, block_size)
-
-    # Finds the dominant frequency.
-    if not dominant_frequency:
-        dominant_frequency = audio_analysis.spectral_analysis(signal,
-                                                              rate)[0][0]
-
-    # Finds the array which contains absolute difference between dominant
-    # frequency and frequency at each time point.
-    frequency_delta = abs(frequency - dominant_frequency)
-
-    # Computes average amplitude of each type of block
-    res = find_block_average_value(amplitude, block_size * 2, block_size)
-    left_block_amplitude, right_block_amplitude, block_amplitude = res
-
-    # Computes average absolute difference of frequency and dominant frequency
-    # of the block of each index
-    _, _, block_frequency_delta = find_block_average_value(
-        frequency_delta, block_size * 2, block_size)
-
-    # Finds start and end index of sine wave.
-    start_index, end_index = find_start_end_index(dominant_frequency,
-                                                  block_frequency_delta,
-                                                  block_size,
-                                                  frequency_error_threshold)
-
-    if start_index > end_index:
-        raise SineWaveNotFound('No sine wave found in signal')
-
-    logging.debug('Found sine wave: start: %s, end: %s',
-                  float(start_index) / rate - APPEND_ZEROS_SECS,
-                  float(end_index) / rate - APPEND_ZEROS_SECS)
-
-    sum_of_amplitude = float(sum(amplitude[int(start_index):int(end_index)]))
-    # Finds average amplitude of sine wave.
-    average_amplitude = sum_of_amplitude / (end_index - start_index)
-
-    # Finds noise before and/or after playback.
-    noise_before_playing, noise_after_playing = noise_detection(
-        start_index, end_index, block_amplitude, average_amplitude, rate,
-        noise_amplitude_threshold)
-
-    # Finds delay during playback.
-    delays = delay_detection(start_index, end_index, block_amplitude,
-                             average_amplitude, dominant_frequency, rate,
-                             left_block_amplitude, right_block_amplitude,
-                             block_frequency_delta, delay_amplitude_threshold,
-                             frequency_error_threshold)
-
-    # Finds burst during playback.
-    burst_time_points = burst_detection(
-        start_index, end_index, block_amplitude, average_amplitude,
-        dominant_frequency, rate, left_block_amplitude, right_block_amplitude,
-        block_frequency_delta, burst_amplitude_threshold,
-        frequency_error_threshold)
-
-    # Finds volume changing during playback.
-    volume_changes = changing_volume_detection(
-        start_index, end_index, average_amplitude, rate, left_block_amplitude,
-        right_block_amplitude, volume_changing_amplitude_threshold)
-
-    # Calculates the average teager value.
-    teager_value = average_teager_value(
-        signal[int(start_index):int(end_index)], average_amplitude)
-
-    # Finds out the noise level.
-    noise = noise_level(average_amplitude, dominant_frequency, rate,
-                        teager_value)
-
-    return {
-        'artifacts': {
-            'noise_before_playback': noise_before_playing,
-            'noise_after_playback': noise_after_playing,
-            'delay_during_playback': delays,
-            'burst_during_playback': burst_time_points
-        },
-        'volume_changes': volume_changes,
-        'equivalent_noise_level': noise
-    }
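
A minimal usage sketch of the module removed above (the 1 kHz tone and 48 kHz
rate are illustrative values chosen here, not taken from the module), showing
how the dictionary returned by quality_measurement() might be consumed:

import numpy

import antlion.test_utils.audio_analysis_lib.audio_quality_measurement as aqm

# Build a one-second, unit-amplitude 1 kHz sine wave, normalized to [-1, 1].
rate = 48000
tone = numpy.sin(2 * numpy.pi * 1000.0 * numpy.arange(rate) / rate)

# Run the artifact detection and noise estimation documented above.
result = aqm.quality_measurement(tone, rate)

artifacts = result['artifacts']
print('noise before playback:', artifacts['noise_before_playback'])
print('bursts during playback:', artifacts['burst_during_playback'])
print('volume changes:', result['volume_changes'])
print('equivalent noise level:', result['equivalent_noise_level'])
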
diff --git a/src/antlion/test_utils/audio_analysis_lib/check_quality.py b/src/antlion/test_utils/audio_analysis_lib/check_quality.py
deleted file mode 100644
index 0eef51b..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/check_quality.py
+++ /dev/null
@@ -1,548 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Audio Analysis tool to analyze wave file and detect artifacts."""
-
-import collections
-import json
-import logging
-import numpy
-import pprint
-import subprocess
-import tempfile
-import wave
-
-import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis
-import antlion.test_utils.audio_analysis_lib.audio_data as audio_data
-import antlion.test_utils.audio_analysis_lib.audio_quality_measurement as \
- audio_quality_measurement
-
-# Holder for quality parameters used in audio_quality_measurement module.
-QualityParams = collections.namedtuple('QualityParams', [
-    'block_size_secs', 'frequency_error_threshold',
-    'delay_amplitude_threshold', 'noise_amplitude_threshold',
-    'burst_amplitude_threshold'
-])
-
-DEFAULT_QUALITY_BLOCK_SIZE_SECS = 0.0015
-DEFAULT_BURST_AMPLITUDE_THRESHOLD = 1.4
-DEFAULT_DELAY_AMPLITUDE_THRESHOLD = 0.6
-DEFAULT_FREQUENCY_ERROR_THRESHOLD = 0.5
-DEFAULT_NOISE_AMPLITUDE_THRESHOLD = 0.5
-
-
-class WaveFileException(Exception):
-    """Error in WaveFile."""
-
-
-class WaveFormatExtensibleException(Exception):
-    """Wave file is in WAVE_FORMAT_EXTENSIBLE format which is not supported."""
-
-
-class WaveFile(object):
-    """Class which handles wave file reading.
-
-    Properties:
-        raw_data: audio_data.AudioRawData object for data in wave file.
-        rate: sampling rate.
-
-    """
-
-    def __init__(self, filename):
-        """Inits a wave file.
-
-        Args:
-            filename: file name of the wave file.
-
-        """
-        self.raw_data = None
-        self.rate = None
-
-        self._wave_reader = None
-        self._n_channels = None
-        self._sample_width_bits = None
-        self._n_frames = None
-        self._binary = None
-
-        try:
-            self._read_wave_file(filename)
-        except WaveFormatExtensibleException:
-            logging.warning(
-                'WAVE_FORMAT_EXTENSIBLE is not supported. '
-                'Try command "sox in.wav -t wavpcm out.wav" to convert '
-                'the file to WAVE_FORMAT_PCM format.')
-            self._convert_and_read_wav_file(filename)
-
-    def _convert_and_read_wav_file(self, filename):
-        """Converts the wav file and read it.
-
-        Converts the file into WAVE_FORMAT_PCM format using sox command and
-        reads its content.
-
-        Args:
-            filename: The wave file to be read.
-
-        Raises:
-            RuntimeError: sox is not installed.
-
-        """
-        # Checks if sox is installed.
-        try:
-            subprocess.check_output(['sox', '--version'])
-        except (OSError, subprocess.CalledProcessError):
-            raise RuntimeError('sox command is not installed. '
-                               'Try sudo apt-get install sox')
-
-        with tempfile.NamedTemporaryFile(suffix='.wav') as converted_file:
-            command = ['sox', filename, '-t', 'wavpcm', converted_file.name]
-            logging.debug('Convert the file using sox: %s', command)
-            subprocess.check_call(command)
-            self._read_wave_file(converted_file.name)
-
-    def _read_wave_file(self, filename):
-        """Reads wave file header and samples.
-
-        Args:
-            filename: The wave file to be read.
-
-        Raises:
-            WaveFormatExtensibleException: Wave file is in
-                                           WAVE_FORMAT_EXTENSIBLE format.
-            WaveFileException: Wave file format is not supported.
-
-        """
-        try:
-            self._wave_reader = wave.open(filename, 'r')
-            self._read_wave_header()
-            self._read_wave_binary()
-        except wave.Error as e:
-            if 'unknown format: 65534' in str(e):
-                raise WaveFormatExtensibleException()
-            else:
-                logging.exception('Unsupported wave format')
-                raise WaveFileException()
-        finally:
-            if self._wave_reader:
-                self._wave_reader.close()
-
-    def _read_wave_header(self):
-        """Reads wave file header.
-
-        Raises:
-            WaveFileException: Wave file is compressed.
-
-        """
-        # Header is a tuple of
-        # (nchannels, sampwidth, framerate, nframes, comptype, compname).
-        header = self._wave_reader.getparams()
-        logging.debug('Wave header: %s', header)
-
-        self._n_channels = header[0]
-        self._sample_width_bits = header[1] * 8
-        self.rate = header[2]
-        self._n_frames = header[3]
-        comptype = header[4]
-        compname = header[5]
-
-        if comptype != 'NONE' or compname != 'not compressed':
-            raise WaveFileException('Compressed wav files are not supported.')
-
-    def _read_wave_binary(self):
-        """Reads in samples in wave file."""
-        self._binary = self._wave_reader.readframes(self._n_frames)
-        format_str = 'S%d_LE' % self._sample_width_bits
-        self.raw_data = audio_data.AudioRawData(binary=self._binary,
-                                                channel=self._n_channels,
-                                                sample_format=format_str)
-
-
-class QualityCheckerError(Exception):
-    """Error in QualityChecker."""
-
-
-class CompareFailure(QualityCheckerError):
-    """Exception when frequency comparison fails."""
-
-
-class QualityFailure(QualityCheckerError):
-    """Exception when quality check fails."""
-
-
-class QualityChecker(object):
-    """Quality checker controls the flow of checking quality of raw data."""
-
-    def __init__(self, raw_data, rate):
-        """Inits a quality checker.
-
-        Args:
-            raw_data: An audio_data.AudioRawData object.
-            rate: Sampling rate in samples per second, e.g. 44100 or 48000.
-
-        """
-        self._raw_data = raw_data
-        self._rate = rate
-        self._spectrals = []
-        self._quality_result = []
-
-    def do_spectral_analysis(self, ignore_high_freq, check_quality,
-                             quality_params):
-        """Gets the spectral_analysis result.
-
-        Args:
-            ignore_high_freq: Ignore high frequencies above this threshold.
-            check_quality: Check quality of each channel.
-            quality_params: A QualityParams object for quality measurement.
-
-        """
-        self.has_data()
-        for channel_idx in range(self._raw_data.channel):
-            signal = self._raw_data.channel_data[channel_idx]
-            max_abs = max(numpy.abs(signal))
-            logging.debug('Channel %d max abs signal: %f', channel_idx,
-                          max_abs)
-            if max_abs == 0:
-                logging.info('No data on channel %d, skip this channel',
-                             channel_idx)
-                continue
-
-            saturate_value = audio_data.get_maximum_value_from_sample_format(
-                self._raw_data.sample_format)
-            normalized_signal = audio_analysis.normalize_signal(
-                signal, saturate_value)
-            logging.debug('saturate_value: %f', saturate_value)
-            logging.debug('max signal after normalized: %f',
-                          max(normalized_signal))
-            spectral = audio_analysis.spectral_analysis(
-                normalized_signal, self._rate)
-
-            logging.debug('Channel %d spectral:\n%s', channel_idx,
-                          pprint.pformat(spectral))
-
-            # Ignore high frequencies above the threshold.
-            spectral = [(f, c) for (f, c) in spectral if f < ignore_high_freq]
-
-            logging.info(
-                'Channel %d spectral after ignoring high frequencies '
-                'above %f:\n%s', channel_idx, ignore_high_freq,
-                pprint.pformat(spectral))
-
-            try:
-                if check_quality:
-                    quality = audio_quality_measurement.quality_measurement(
-                        signal=normalized_signal,
-                        rate=self._rate,
-                        dominant_frequency=spectral[0][0],
-                        block_size_secs=quality_params.block_size_secs,
-                        frequency_error_threshold=quality_params.
-                        frequency_error_threshold,
-                        delay_amplitude_threshold=quality_params.
-                        delay_amplitude_threshold,
-                        noise_amplitude_threshold=quality_params.
-                        noise_amplitude_threshold,
-                        burst_amplitude_threshold=quality_params.
-                        burst_amplitude_threshold)
-
-                    logging.debug('Channel %d quality:\n%s', channel_idx,
-                                  pprint.pformat(quality))
-                    self._quality_result.append(quality)
-                self._spectrals.append(spectral)
-            except Exception as error:
-                logging.warning(
-                    "Failed to analyze channel {} with error: {}".format(
-                        channel_idx, error))
-
-    def has_data(self):
-        """Checks if data has been set.
-
-        Raises:
-            QualityCheckerError: if data or rate is not set yet.
-
-        """
-        if not self._raw_data or not self._rate:
-            raise QualityCheckerError('Data and rate are not set yet')
-
-    def check_freqs(self, expected_freqs, freq_threshold):
-        """Checks the dominant frequencies in the channels.
-
-        Args:
-            expected_freqs: A list of frequencies. If a frequency is 0, it
-                              means this channel should be ignored.
-            freq_threshold: The difference threshold to compare two
-                               frequencies.
-
-        """
-        logging.debug('expected_freqs: %s', expected_freqs)
-        for idx, expected_freq in enumerate(expected_freqs):
-            if expected_freq == 0:
-                continue
-            if not self._spectrals[idx]:
-                raise CompareFailure(
-                    'Failed at channel %d: no dominant frequency' % idx)
-            dominant_freq = self._spectrals[idx][0][0]
-            if abs(dominant_freq - expected_freq) > freq_threshold:
-                raise CompareFailure(
-                    'Failed at channel %d: %f is too far away from %f' %
-                    (idx, dominant_freq, expected_freq))
-
-    def check_quality(self):
-        """Checks the quality measurement results on each channel.
-
-        Raises:
-            QualityFailure: If an artifact is found.
-
-        """
-        error_msgs = []
-
-        for idx, quality_res in enumerate(self._quality_result):
-            artifacts = quality_res['artifacts']
-            if artifacts['noise_before_playback']:
-                error_msgs.append('Found noise before playback: %s' %
-                                  (artifacts['noise_before_playback']))
-            if artifacts['noise_after_playback']:
-                error_msgs.append('Found noise after playback: %s' %
-                                  (artifacts['noise_after_playback']))
-            if artifacts['delay_during_playback']:
-                error_msgs.append('Found delay during playback: %s' %
-                                  (artifacts['delay_during_playback']))
-            if artifacts['burst_during_playback']:
-                error_msgs.append('Found burst during playback: %s' %
-                                  (artifacts['burst_during_playback']))
-        if error_msgs:
-            raise QualityFailure('Found bad quality: %s' %
-                                 '\n'.join(error_msgs))
-
-    def dump(self, output_file):
-        """Dumps the result into a file in json format.
-
-        Args:
-            output_file: A file path to dump spectral and quality
-                            measurement result of each channel.
-
-        """
-        dump_dict = {
-            'spectrals': self._spectrals,
-            'quality_result': self._quality_result
-        }
-        with open(output_file, 'w') as f:
-            json.dump(dump_dict, f)
-
-
-class CheckQualityError(Exception):
-    """Error in check_quality main function."""
-
-
-def read_audio_file(filename, channel, bit_width, rate):
-    """Reads audio file.
-
-    Args:
-        filename: The wav or raw file to check.
-        channel: For raw file. Number of channels.
-        bit_width: For raw file. Bit width of a sample.
-        rate: Sampling rate in samples per second, e.g. 44100 or 48000.
-
-    Returns:
-        A tuple (raw_data, rate) where raw_data is audio_data.AudioRawData, rate
-            is sampling rate.
-
-    """
-    if filename.endswith('.wav'):
-        wavefile = WaveFile(filename)
-        raw_data = wavefile.raw_data
-        rate = wavefile.rate
-    elif filename.endswith('.raw'):
-        binary = None
-        with open(filename, 'rb') as f:
-            binary = f.read()
-        raw_data = audio_data.AudioRawData(binary=binary,
-                                           channel=channel,
-                                           sample_format='S%d_LE' % bit_width)
-    else:
-        raise CheckQualityError('File format for %s is not supported' %
-                                filename)
-
-    return raw_data, rate
-
-
-def get_quality_params(quality_block_size_secs,
-                       quality_frequency_error_threshold,
-                       quality_delay_amplitude_threshold,
-                       quality_noise_amplitude_threshold,
-                       quality_burst_amplitude_threshold):
-    """Gets quality parameters in arguments.
-
-    Args:
-        quality_block_size_secs: Input block size in seconds.
-        quality_frequency_error_threshold: Input frequency error threshold.
-        quality_delay_amplitude_threshold: Input delay amplitude threshold.
-        quality_noise_amplitude_threshold: Input noise amplitude threshold.
-        quality_burst_amplitude_threshold: Input burst amplitude threshold.
-
-    Returns:
-        A QualityParams object.
-
-    """
-    quality_params = QualityParams(
-        block_size_secs=quality_block_size_secs,
-        frequency_error_threshold=quality_frequency_error_threshold,
-        delay_amplitude_threshold=quality_delay_amplitude_threshold,
-        noise_amplitude_threshold=quality_noise_amplitude_threshold,
-        burst_amplitude_threshold=quality_burst_amplitude_threshold)
-
-    return quality_params
-
-
-def quality_analysis(
-        filename,
-        output_file,
-        bit_width,
-        rate,
-        channel,
-        freqs=None,
-        freq_threshold=5,
-        ignore_high_freq=5000,
-        spectral_only=False,
-        quality_block_size_secs=DEFAULT_QUALITY_BLOCK_SIZE_SECS,
-        quality_burst_amplitude_threshold=DEFAULT_BURST_AMPLITUDE_THRESHOLD,
-        quality_delay_amplitude_threshold=DEFAULT_DELAY_AMPLITUDE_THRESHOLD,
-        quality_frequency_error_threshold=DEFAULT_FREQUENCY_ERROR_THRESHOLD,
-        quality_noise_amplitude_threshold=DEFAULT_NOISE_AMPLITUDE_THRESHOLD,
-):
-    """ Runs various functions to measure audio quality base on user input.
-
-    Args:
-        filename: The wav or raw file to check.
-        output_file: Output file to dump analysis result in JSON format.
-        bit_width: For raw file. Bit width of a sample.
-        rate: Sampling rate in samples per second, e.g. 44100 or 48000.
-        channel: For raw file. Number of channels.
-        freqs: Expected frequencies in the channels.
-        freq_threshold: Frequency difference threshold in Hz.
-        ignore_high_freq: Frequencies above this threshold in Hz are ignored.
-            Defaults to 5 kHz.
-        spectral_only: Only do spectral analysis on each channel.
-        quality_block_size_secs: Input block size in seconds.
-        quality_frequency_error_threshold: Input frequency error threshold.
-        quality_delay_amplitude_threshold: Input delay amplitude threshold.
-        quality_noise_amplitude_threshold: Input noise amplitude threshold.
-        quality_burst_amplitude_threshold: Input burst amplitude threshold.
-    """
-
-    raw_data, rate = read_audio_file(filename, channel, bit_width, rate)
-
-    checker = QualityChecker(raw_data, rate)
-
-    quality_params = get_quality_params(quality_block_size_secs,
-                                        quality_frequency_error_threshold,
-                                        quality_delay_amplitude_threshold,
-                                        quality_noise_amplitude_threshold,
-                                        quality_burst_amplitude_threshold)
-
-    checker.do_spectral_analysis(ignore_high_freq=ignore_high_freq,
-                                 check_quality=(not spectral_only),
-                                 quality_params=quality_params)
-
-    checker.dump(output_file)
-
-    if freqs:
-        checker.check_freqs(freqs, freq_threshold)
-
-    if not spectral_only:
-        checker.check_quality()
-    logging.debug("Audio analysis completed.")
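
A usage sketch of the quality_analysis() entry point removed above (the file
names and the stereo, 16-bit, 48 kHz parameters are placeholder assumptions):

from antlion.test_utils.audio_analysis_lib.check_quality import quality_analysis

# Analyze a hypothetical stereo capture, expecting a 1 kHz tone on both
# channels, and dump the spectral/quality results to a JSON file.
quality_analysis(
    filename='recorded.wav',      # placeholder capture file
    output_file='analysis.json',  # spectral and quality results in JSON
    bit_width=16,                 # only used for .raw inputs
    rate=48000,                   # only used for .raw inputs; .wav overrides it
    channel=2,                    # only used for .raw inputs
    freqs=[1000, 1000],           # expected dominant frequency per channel
    freq_threshold=5)             # allowed deviation in Hz
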
diff --git a/src/antlion/test_utils/bt/A2dpBaseTest.py b/src/antlion/test_utils/bt/A2dpBaseTest.py
deleted file mode 100644
index 1b8d4e0..0000000
--- a/src/antlion/test_utils/bt/A2dpBaseTest.py
+++ /dev/null
@@ -1,445 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Stream music through connected device from phone test implementation."""
-import antlion
-import os
-import pandas as pd
-import shutil
-import time
-
-import antlion.test_utils.coex.audio_test_utils as atu
-import antlion.test_utils.bt.bt_test_utils as btutils
-from antlion import asserts
-from antlion.test_utils.bt import bt_constants
-from antlion.test_utils.bt import BtEnum
-from antlion.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as bt_factory
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.ble_performance_test_utils import plot_graph
-from antlion.test_utils.power.PowerBTBaseTest import ramp_attenuation
-from antlion.test_utils.bt.loggers import bluetooth_metric_logger as log
-from antlion.signals import TestPass, TestError
-
-PHONE_MUSIC_FILE_DIRECTORY = '/sdcard/Music'
-INIT_ATTEN = 0
-WAIT_TIME = 1
-
-
-class A2dpBaseTest(BluetoothBaseTest):
-    """Stream audio file over desired Bluetooth codec configurations.
-
-    Audio file should be a sine wave. Other audio files will not work for the
-    test analysis metrics.
-
-    The device under test is an Android phone connected to a headset with a
-    controller that can generate a BluetoothHandsfreeAbstractDevice via
-    test_utils.abstract_devices.bluetooth_handsfree_abstract_device.
-    BluetoothHandsfreeAbstractDeviceFactory.
-    """
-    def setup_class(self):
-
-        super().setup_class()
-        self.bt_logger = log.BluetoothMetricLogger.for_test_case()
-        self.dut = self.android_devices[0]
-        req_params = ['audio_params', 'music_files', 'system_path_loss']
-        opt_params = ['bugreport']
-        # 'audio_params' is a dict containing the audio device type, audio
-        # streaming settings such as volume and duration, audio recording
-        # parameters such as channel and sampling rate/width, and THD+N
-        # parameters for audio processing.
-        self.unpack_userparams(req_params)
-        self.unpack_userparams(opt_params, bugreport=None)
-        # Find music file and push it to the dut
-        music_src = self.music_files[0]
-        music_dest = PHONE_MUSIC_FILE_DIRECTORY
-        success = self.dut.push_system_file(music_src, music_dest)
-        if success:
-            self.music_file = os.path.join(PHONE_MUSIC_FILE_DIRECTORY,
-                                           os.path.basename(music_src))
-        # Initialize media_control class
-        self.media = btutils.MediaControlOverSl4a(self.dut, self.music_file)
-        # Set attenuator to minimum attenuation
-        if hasattr(self, 'attenuators'):
-            self.attenuator = self.attenuators[0]
-            self.attenuator.set_atten(INIT_ATTEN)
-        # Create the BTOE(Bluetooth-Other-End) device object
-        bt_devices = self.user_params.get('bt_devices', [])
-        if bt_devices:
-            attr, idx = bt_devices.split(':')
-            self.bt_device_controller = getattr(self, attr)[int(idx)]
-            self.bt_device = bt_factory().generate(self.bt_device_controller)
-        else:
-            self.log.error('No BT devices config is provided!')
-
-    def teardown_class(self):
-
-        super().teardown_class()
-        if hasattr(self, 'media'):
-            self.media.stop()
-        if hasattr(self, 'attenuator'):
-            self.attenuator.set_atten(INIT_ATTEN)
-        self.dut.droid.bluetoothFactoryReset()
-        self.bt_device.reset()
-        self.bt_device.power_off()
-        btutils.disable_bluetooth(self.dut.droid)
-
-    def setup_test(self):
-
-        super().setup_test()
-        # Initialize audio capture devices
-        self.audio_device = atu.get_audio_capture_device(
-            self.bt_device_controller, self.audio_params)
-        # Reset BT to factory defaults
-        self.dut.droid.bluetoothFactoryReset()
-        self.bt_device.reset()
-        self.bt_device.power_on()
-        btutils.enable_bluetooth(self.dut.droid, self.dut.ed)
-        btutils.connect_phone_to_headset(self.dut, self.bt_device, 60)
-        vol = self.dut.droid.getMaxMediaVolume() * self.audio_params['volume']
-        self.dut.droid.setMediaVolume(0)
-        time.sleep(1)
-        self.dut.droid.setMediaVolume(int(vol))
-
-    def teardown_test(self):
-
-        super().teardown_test()
-        self.dut.droid.bluetoothFactoryReset()
-        self.media.stop()
-        # Set Attenuator to the initial attenuation
-        if hasattr(self, 'attenuator'):
-            self.attenuator.set_atten(INIT_ATTEN)
-        self.bt_device.reset()
-        self.bt_device.power_off()
-        btutils.disable_bluetooth(self.dut.droid)
-
-    def on_pass(self, test_name, begin_time):
-
-        if hasattr(self, 'bugreport') and self.bugreport == 1:
-            self._take_bug_report(test_name, begin_time)
-
-    def play_and_record_audio(self, duration):
-        """Play and record audio for a set duration.
-
-        Args:
-            duration: duration in seconds for music playing
-        Returns:
-            audio_captured: captured audio file path
-        """
-
-        self.log.info(
-            'Play and record audio for {} seconds'.format(duration))
-        self.media.play()
-        proc = self.audio_device.start()
-        time.sleep(duration + WAIT_TIME)
-        proc.kill()
-        time.sleep(WAIT_TIME)
-        proc.kill()
-        audio_captured = self.audio_device.stop()
-        self.media.stop()
-        self.log.info('Audio play and record stopped')
-        asserts.assert_true(audio_captured, 'Audio not recorded')
-        return audio_captured
-
-    def _get_bt_link_metrics(self, tag=''):
-        """Get bt link metrics such as rssi and tx pwls.
-
-        Returns:
-            master_metrics_list: list of metrics of central device
-            slave_metrics_list: list of metric of peripheral device
-        """
-
-        self.raw_bt_metrics_path = os.path.join(self.log_path,
-                                                'BT_Raw_Metrics')
-        self.media.play()
-        # Get master rssi and power level
-        process_data_dict = btutils.get_bt_metric(
-            self.dut, tag=tag, log_path=self.raw_bt_metrics_path)
-        rssi_master = process_data_dict.get('rssi')
-        pwl_master = process_data_dict.get('pwlv')
-        rssi_c0_master = process_data_dict.get('rssi_c0')
-        rssi_c1_master = process_data_dict.get('rssi_c1')
-        txpw_c0_master = process_data_dict.get('txpw_c0')
-        txpw_c1_master = process_data_dict.get('txpw_c1')
-        bftx_master = process_data_dict.get('bftx')
-        divtx_master = process_data_dict.get('divtx')
-
-        if isinstance(self.bt_device_controller,
-                      antlion.controllers.android_device.AndroidDevice):
-            rssi_slave = btutils.get_bt_rssi(self.bt_device_controller,
-                                             tag=tag,
-                                             log_path=self.raw_bt_metrics_path)
-        else:
-            rssi_slave = None
-        self.media.stop()
-
-        master_metrics_list = [
-            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
-            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
-        ]
-        slave_metrics_list = [rssi_slave]
-
-        return master_metrics_list, slave_metrics_list
-
-    def run_thdn_analysis(self, audio_captured, tag):
-        """Calculate Total Harmonic Distortion plus Noise for latest recording.
-
-        Store result in self.metrics.
-
-        Args:
-            audio_captured: the captured audio file
-        Returns:
-            thdn: thdn value in a list
-        """
-        # Calculate Total Harmonic Distortion + Noise
-        audio_result = atu.AudioCaptureResult(audio_captured,
-                                              self.audio_params)
-        thdn = audio_result.THDN(**self.audio_params['thdn_params'])
-        file_name = tag + os.path.basename(audio_result.path)
-        file_new = os.path.join(os.path.dirname(audio_result.path), file_name)
-        shutil.copyfile(audio_result.path, file_new)
-        for ch_no, t in enumerate(thdn):
-            self.log.info('THD+N for channel %s: %.4f%%' % (ch_no, t * 100))
-        return thdn
-
-    def run_anomaly_detection(self, audio_captured):
-        """Detect anomalies in latest recording.
-
-        Store result in self.metrics.
-
-        Args:
-            audio_captured: the captured audio file
-        Returns:
-            anom: anom detected in the captured file
-        """
-        # Detect Anomalies
-        audio_result = atu.AudioCaptureResult(audio_captured)
-        anom = audio_result.detect_anomalies(
-            **self.audio_params['anomaly_params'])
-        num_anom = 0
-        for ch_no, anomalies in enumerate(anom):
-            if anomalies:
-                for anomaly in anomalies:
-                    num_anom += 1
-                    start, end = anomaly
-                    self.log.warning(
-                        'Anomaly on channel {} at {}:{}. Duration '
-                        '{} sec'.format(ch_no, start // 60, start % 60,
-                                        end - start))
-        else:
-            self.log.info('%i anomalies detected.' % num_anom)
-        return anom
-
-    def generate_proto(self, data_points, codec_type, sample_rate,
-                       bits_per_sample, channel_mode):
-        """Generate a results protobuf.
-
-        Args:
-            data_points: list of dicts representing info to go into
-              AudioTestDataPoint protobuffer message.
-            codec_type: The codec type config to store in the proto.
-            sample_rate: The sample rate config to store in the proto.
-            bits_per_sample: The bits per sample config to store in the proto.
-            channel_mode: The channel mode config to store in the proto.
-        Returns:
-             dict: Dictionary with key 'proto' mapping to serialized protobuf,
-               'proto_ascii' mapping to human readable protobuf info, and 'test'
-               mapping to the test class name that generated the results.
-        """
-
-        # Populate protobuf
-        test_case_proto = self.bt_logger.proto_module.BluetoothAudioTestResult(
-        )
-
-        for data_point in data_points:
-            audio_data_proto = test_case_proto.data_points.add()
-            log.recursive_assign(audio_data_proto, data_point)
-
-        codec_proto = test_case_proto.a2dp_codec_config
-        codec_proto.codec_type = bt_constants.codec_types[codec_type]
-        codec_proto.sample_rate = int(sample_rate)
-        codec_proto.bits_per_sample = int(bits_per_sample)
-        codec_proto.channel_mode = bt_constants.channel_modes[channel_mode]
-
-        self.bt_logger.add_config_data_to_proto(test_case_proto, self.dut,
-                                                self.bt_device)
-
-        self.bt_logger.add_proto_to_results(test_case_proto,
-                                            self.__class__.__name__)
-
-        proto_dict = self.bt_logger.get_proto_dict(self.__class__.__name__,
-                                                   test_case_proto)
-        del proto_dict["proto_ascii"]
-        return proto_dict
-
-    def set_test_atten(self, atten):
-        """Set the attenuation(s) for current test condition.
-
-        """
-        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
-            ramp_attenuation(self.atten_c0,
-                             atten,
-                             attenuation_step_max=2,
-                             time_wait_in_between=1)
-            self.log.info('Set Chain 0 attenuation to %d dB', atten)
-            ramp_attenuation(self.atten_c1,
-                             atten + self.gain_mismatch,
-                             attenuation_step_max=2,
-                             time_wait_in_between=1)
-            self.log.info('Set Chain 1 attenuation to %d dB',
-                          atten + self.gain_mismatch)
-        else:
-            ramp_attenuation(self.attenuator, atten)
-            self.log.info('Set attenuation to %d dB', atten)
-
-    def run_a2dp_to_max_range(self, codec_config):
-        attenuation_range = range(self.attenuation_vector['start'],
-                                  self.attenuation_vector['stop'] + 1,
-                                  self.attenuation_vector['step'])
-
-        data_points = []
-        self.file_output = os.path.join(
-            self.log_path, '{}.csv'.format(self.current_test_name))
-
-        # Set Codec if needed
-        current_codec = self.dut.droid.bluetoothA2dpGetCurrentCodecConfig()
-        current_codec_type = BtEnum.BluetoothA2dpCodecType(
-            current_codec['codecType']).name
-        if current_codec_type != codec_config['codec_type']:
-            codec_set = btutils.set_bluetooth_codec(self.dut, **codec_config)
-            asserts.assert_true(codec_set, 'Codec configuration failed.')
-        else:
-            self.log.info('Current codec is {}, no need to change'.format(
-                current_codec_type))
-
-        #loop RSSI with the same codec setting
-        for atten in attenuation_range:
-            self.media.play()
-            self.set_test_atten(atten)
-
-            tag = 'codec_{}_attenuation_{}dB_'.format(
-                codec_config['codec_type'], atten)
-            recorded_file = self.play_and_record_audio(
-                self.audio_params['duration'])
-            thdns = self.run_thdn_analysis(recorded_file, tag)
-
-            # Collect Metrics for dashboard
-            [
-                rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
-                txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
-            ], [rssi_slave] = self._get_bt_link_metrics(tag)
-
-            data_point = {
-                'attenuation_db':
-                int(self.attenuator.get_atten()),
-                'pathloss':
-                atten + self.system_path_loss,
-                'rssi_primary':
-                rssi_master.get(self.dut.serial, -127),
-                'tx_power_level_master':
-                pwl_master.get(self.dut.serial, -127),
-                'rssi_secondary':
-                rssi_slave.get(self.bt_device_controller.serial, -127),
-                'rssi_c0_dut':
-                rssi_c0_master.get(self.dut.serial, -127),
-                'rssi_c1_dut':
-                rssi_c1_master.get(self.dut.serial, -127),
-                'txpw_c0_dut':
-                txpw_c0_master.get(self.dut.serial, -127),
-                'txpw_c1_dut':
-                txpw_c1_master.get(self.dut.serial, -127),
-                'bftx_state':
-                bftx_master.get(self.dut.serial, -127),
-                'divtx_state':
-                divtx_master.get(self.dut.serial, -127),
-                'total_harmonic_distortion_plus_noise_percent':
-                thdns[0] * 100
-            }
-            self.log.info(data_point)
-            # bokeh data for generating BokehFigure
-            bokeh_data = {
-                'x_label': 'Pathloss (dBm)',
-                'primary_y_label': 'RSSI (dBm)',
-                'log_path': self.log_path,
-                'current_test_name': self.current_test_name
-            }
-            #plot_data for adding line to existing BokehFigure
-            plot_data = {
-                'line_one': {
-                    'x_label': 'Pathloss (dBm)',
-                    'primary_y_label': 'RSSI (dBm)',
-                    'x_column': 'pathloss',
-                    'y_column': 'rssi_primary',
-                    'legend': 'DUT RSSI (dBm)',
-                    'marker': 'circle_x',
-                    'y_axis': 'default'
-                },
-                'line_two': {
-                    'x_column': 'pathloss',
-                    'y_column': 'rssi_secondary',
-                    'legend': 'Remote device RSSI (dBm)',
-                    'marker': 'hex',
-                    'y_axis': 'default'
-                },
-                'line_three': {
-                    'x_column': 'pathloss',
-                    'y_column': 'tx_power_level_master',
-                    'legend': 'DUT TX Power (dBm)',
-                    'marker': 'hex',
-                    'y_axis': 'secondary'
-                }
-            }
-
-            # Check thdn for glitches, stop if max range reached
-            if thdns[0] == 0:
-                proto_dict = self.generate_proto(data_points, **codec_config)
-                A2dpRange_df = pd.DataFrame(data_points)
-                A2dpRange_df.to_csv(self.file_output, index=False)
-                plot_graph(A2dpRange_df,
-                           plot_data,
-                           bokeh_data,
-                           secondary_y_label='DUT TX Power')
-                raise TestError(
-                    'Music play/recording is not working properly or the connection has been lost'
-                )
-
-            data_points.append(data_point)
-            A2dpRange_df = pd.DataFrame(data_points)
-
-            for thdn in thdns:
-                if thdn >= self.audio_params['thdn_threshold']:
-                    self.log.info(
-                        'Max range at attenuation {} dB'.format(atten))
-                    self.log.info('DUT rssi {} dBm, DUT tx power level {}, '
-                                  'Remote rssi {} dBm'.format(
-                                      rssi_master, pwl_master, rssi_slave))
-                    proto_dict = self.generate_proto(data_points,
-                                                     **codec_config)
-                    A2dpRange_df.to_csv(self.file_output, index=False)
-                    plot_graph(A2dpRange_df,
-                               plot_data,
-                               bokeh_data,
-                               secondary_y_label='DUT TX Power')
-                    # Max range reached; move on to the next codec.
-                    return True
-        # Save Data points to csv
-        A2dpRange_df.to_csv(self.file_output, index=False)
-        # Plot graph
-        plot_graph(A2dpRange_df,
-                   plot_data,
-                   bokeh_data,
-                   secondary_y_label='DUT TX Power')
-        proto_dict = self.generate_proto(data_points, **codec_config)
-        # Max range not reached; extra attenuation would be needed.
-        return True
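
A minimal subclass sketch for the base class removed above (the test name and
codec values are hypothetical examples; real suites take codec configurations
from their test config):

from antlion.test_utils.bt.A2dpBaseTest import A2dpBaseTest


class A2dpRangeExampleTest(A2dpBaseTest):
    """Example only: sweeps attenuation for one codec until THD+N degrades."""

    def test_a2dp_range_sbc(self):
        # Keys mirror the **codec_config expansion used by
        # run_a2dp_to_max_range() and generate_proto() above.
        codec_config = {
            'codec_type': 'SBC',
            'sample_rate': 44100,
            'bits_per_sample': 16,
            'channel_mode': 'STEREO',
        }
        self.run_a2dp_to_max_range(codec_config)
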
diff --git a/src/antlion/test_utils/bt/AvrcpBaseTest.py b/src/antlion/test_utils/bt/AvrcpBaseTest.py
deleted file mode 100644
index d6d2007..0000000
--- a/src/antlion/test_utils/bt/AvrcpBaseTest.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Perform base Avrcp command from headset to dut"""
-import time
-import os
-import queue
-
-from antlion import asserts
-from antlion.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as Factory
-from antlion.test_utils.bt.simulated_carkit_device import SimulatedCarkitDevice
-from antlion.test_utils.bt.bt_test_utils import connect_phone_to_headset
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.car.car_media_utils import EVENT_PLAY_RECEIVED
-from antlion.test_utils.car.car_media_utils import EVENT_PAUSE_RECEIVED
-from antlion.test_utils.car.car_media_utils import EVENT_SKIP_NEXT_RECEIVED
-from antlion.test_utils.car.car_media_utils import EVENT_SKIP_PREV_RECEIVED
-from antlion.test_utils.car.car_media_utils import CMD_MEDIA_PLAY
-from antlion.test_utils.car.car_media_utils import CMD_MEDIA_PAUSE
-
-ADB_FILE_EXISTS = 'test -e %s && echo True'
-DEFAULT_TIMEOUT = 5
-EVENT_TIMEOUT = 1
-
-
-class AvrcpBaseTest(BluetoothBaseTest):
-    def __init__(self, configs):
-        super(AvrcpBaseTest, self).__init__(configs)
-        self.dut = self.android_devices[0]
-        serial = self.user_params['simulated_carkit_device']
-        controller = SimulatedCarkitDevice(serial)
-        self.controller = Factory().generate(controller)
-
-        self.phone_music_files = []
-        self.host_music_files = []
-        for music_file in self.user_params['music_file_names']:
-            self.phone_music_files.append(os.path.join(
-                self.user_params['phone_music_file_dir'], music_file))
-            self.host_music_files.append(os.path.join(
-                self.user_params['host_music_file_dir'], music_file))
-
-        self.ensure_phone_has_music_file()
-
-    def setup_class(self):
-        super().setup_class()
-        self.controller.power_on()
-        time.sleep(DEFAULT_TIMEOUT)
-
-    def teardown_class(self):
-        super().teardown_class()
-        self.dut.droid.mediaPlayStop()
-        self.controller.destroy()
-
-    def setup_test(self):
-        self.dut.droid.bluetoothMediaPhoneSL4AMBSStart()
-        time.sleep(DEFAULT_TIMEOUT)
-
-        self.dut.droid.bluetoothStartPairingHelper(True)
-        if not connect_phone_to_headset(self.dut, self.controller, 600):
-            asserts.fail('Not able to connect to hands-free device')
-
-        #make sure SL4AMBS is active MediaSession
-        self.dut.droid.bluetoothMediaHandleMediaCommandOnPhone(CMD_MEDIA_PLAY)
-        time.sleep(0.5)
-        self.dut.droid.bluetoothMediaHandleMediaCommandOnPhone(CMD_MEDIA_PAUSE)
-
-    def teardown_test(self):
-        self.dut.droid.bluetoothMediaPhoneSL4AMBSStop()
-
-    def ensure_phone_has_music_file(self):
-        """Make sure music file (based on config values) is on the phone."""
-        for host_file, phone_file in zip(self.host_music_files,
-                                         self.phone_music_files):
-            if self.dut.adb.shell(ADB_FILE_EXISTS % phone_file):
-                self.log.info(
-                    'Music file {} already on phone. Skipping file transfer.'
-                    .format(host_file))
-            else:
-                self.dut.adb.push(host_file, phone_file)
-                has_file = self.dut.adb.shell(
-                        ADB_FILE_EXISTS % phone_file)
-                if not has_file:
-                    self.log.error(
-                        'Audio file {} not pushed to phone.'.format(host_file))
-                else:
-                    self.log.info('Music file successfully pushed to phone.')
-
-    def play_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.play()
-        try:
-            self.dut.ed.pop_event(EVENT_PLAY_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not received'.format(EVENT_PLAY_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_PLAY_RECEIVED))
-
-    def pause_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.pause()
-        try:
-            self.dut.ed.pop_event(EVENT_PAUSE_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not received'.format(EVENT_PAUSE_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_PAUSE_RECEIVED))
-
-    def skip_next_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.next_track()
-        try:
-            self.dut.ed.pop_event(EVENT_SKIP_NEXT_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not '
-                         'received'.format(EVENT_SKIP_NEXT_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_SKIP_NEXT_RECEIVED))
-
-    def skip_prev_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.previous_track()
-        try:
-            self.dut.ed.pop_event(EVENT_SKIP_PREV_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not '
-                         'received'.format(EVENT_SKIP_PREV_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_SKIP_PREV_RECEIVED))
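The AVRCP helpers deleted above all follow the same clear-trigger-wait pattern: flush the SL4A event dispatcher, issue a command from the controller, then block on the expected media-session event and fail the test if it never arrives. Below is a minimal standalone sketch of that pattern, using a plain queue.Queue as a stand-in for the dispatcher; FakeDispatcher and verify_event are illustrative names, not antlion APIs.

import queue

EVENT_TIMEOUT = 1  # seconds, the same default the deleted helpers used


class FakeDispatcher:
    """Minimal stand-in for the SL4A event dispatcher on the DUT."""

    def __init__(self):
        self._events = queue.Queue()

    def clear_all_events(self):
        while not self._events.empty():
            self._events.get_nowait()

    def post(self, name):
        self._events.put(name)

    def pop_event(self, name, timeout):
        event = self._events.get(timeout=timeout)  # raises queue.Empty
        if event != name:
            raise queue.Empty('expected {}, got {}'.format(name, event))
        return event


def verify_event(dispatcher, trigger, expected_event):
    """Clear stale events, run the trigger, then wait for the expected event."""
    dispatcher.clear_all_events()
    trigger()
    try:
        dispatcher.pop_event(expected_event, EVENT_TIMEOUT)
    except queue.Empty:
        raise AssertionError('{} event not received'.format(expected_event))


if __name__ == '__main__':
    ed = FakeDispatcher()
    verify_event(ed, lambda: ed.post('playReceived'), 'playReceived')
    print('playReceived verified')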
diff --git a/src/antlion/test_utils/bt/BleEnum.py b/src/antlion/test_utils/bt/BleEnum.py
deleted file mode 100644
index 4aed867..0000000
--- a/src/antlion/test_utils/bt/BleEnum.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-
-class ScanSettingsCallbackType(Enum):
-    CALLBACK_TYPE_ALL_MATCHES = 1
-    CALLBACK_TYPE_FIRST_MATCH = 2
-    CALLBACK_TYPE_MATCH_LOST = 4
-    CALLBACK_TYPE_FOUND_AND_LOST = 6
-
-
-class ScanSettingsMatchMode(Enum):
-    AGGRESIVE = 1
-    STICKY = 2
-
-
-class ScanSettingsMatchNum(Enum):
-    MATCH_NUM_ONE_ADVERTISEMENT = 1
-    MATCH_NUM_FEW_ADVERTISEMENT = 2
-    MATCH_NUM_MAX_ADVERTISEMENT = 3
-
-
-class ScanSettingsScanResultType(Enum):
-    SCAN_RESULT_TYPE_FULL = 0
-    SCAN_RESULT_TYPE_ABBREVIATED = 1
-
-
-class ScanSettingsScanMode(Enum):
-    SCAN_MODE_OPPORTUNISTIC = -1
-    SCAN_MODE_LOW_POWER = 0
-    SCAN_MODE_BALANCED = 1
-    SCAN_MODE_LOW_LATENCY = 2
-
-
-class ScanSettingsReportDelaySeconds(Enum):
-    MIN = 0
-    MAX = 9223372036854775807
-
-
-class ScanSettingsPhy(Enum):
-    PHY_LE_1M = 1
-    PHY_LE_CODED = 3
-    PHY_LE_ALL_SUPPORTED = 255
-
-
-class AdvertiseSettingsAdvertiseType(Enum):
-    ADVERTISE_TYPE_NON_CONNECTABLE = 0
-    ADVERTISE_TYPE_CONNECTABLE = 1
-
-
-class AdvertiseSettingsAdvertiseMode(Enum):
-    ADVERTISE_MODE_LOW_POWER = 0
-    ADVERTISE_MODE_BALANCED = 1
-    ADVERTISE_MODE_LOW_LATENCY = 2
-
-
-class AdvertiseSettingsAdvertiseTxPower(Enum):
-    ADVERTISE_TX_POWER_ULTRA_LOW = 0
-    ADVERTISE_TX_POWER_LOW = 1
-    ADVERTISE_TX_POWER_MEDIUM = 2
-    ADVERTISE_TX_POWER_HIGH = 3
-
-
-class BLEConnectionPriority(Enum):
-    # Connection Interval: BALANCED = 36ms, HIGH = 12ms, LOW = 96ms
-    CONNECTION_PRIORITY_BALANCED = 0
-    CONNECTION_PRIORITY_HIGH = 1
-    CONNECTION_PRIORITY_LOW = 2
-
-
-class JavaInteger(Enum):
-    MIN = -2147483648
-    MAX = 2147483647
-
-
-class Uuids(Enum):
-    P_Service = "0000feef-0000-1000-8000-00805f9b34fb"
-    HR_SERVICE = "0000180d-0000-1000-8000-00805f9b34fb"
-
-
-class AdvertiseErrorCode(Enum):
-    DATA_TOO_LARGE = 1
-    TOO_MANY_ADVERTISERS = 2
-    ADVERTISE_ALREADY_STARTED = 3
-    BLUETOOTH_INTERNAL_FAILURE = 4
-    FEATURE_NOT_SUPPORTED = 5
-
-
-class BluetoothAdapterState(Enum):
-    STATE_OFF = 10
-    STATE_TURNING_ON = 11
-    STATE_ON = 12
-    STATE_TURNING_OFF = 13
-    STATE_BLE_TURNING_ON = 14
-    STATE_BLE_ON = 15
-    STATE_BLE_TURNING_OFF = 16
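These enums give test code readable names for the raw integer constants that SL4A and the Android BLE APIs expect. A small sketch of the intended usage follows, with the enum redeclared locally so the snippet runs standalone; build_scan_settings is an illustrative helper, not an antlion API.

from enum import Enum


class ScanSettingsScanMode(Enum):
    SCAN_MODE_OPPORTUNISTIC = -1
    SCAN_MODE_LOW_POWER = 0
    SCAN_MODE_BALANCED = 1
    SCAN_MODE_LOW_LATENCY = 2


def build_scan_settings(mode):
    """Translate the readable enum member into the raw integer SL4A expects."""
    return {'ScanSettingsScanMode': mode.value}


if __name__ == '__main__':
    settings = build_scan_settings(ScanSettingsScanMode.SCAN_MODE_LOW_LATENCY)
    assert settings['ScanSettingsScanMode'] == 2
    print(settings)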
diff --git a/src/antlion/test_utils/bt/BluetoothBaseTest.py b/src/antlion/test_utils/bt/BluetoothBaseTest.py
deleted file mode 100644
index 4730bc9..0000000
--- a/src/antlion/test_utils/bt/BluetoothBaseTest.py
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-    Base Class for Defining Common Bluetooth Test Functionality
-"""
-
-import threading
-import time
-import traceback
-import os
-from antlion.base_test import BaseTestClass
-from antlion.signals import TestSignal
-from antlion.utils import dump_string_to_file
-
-from antlion.test_utils.bt.bt_test_utils import get_device_selector_dictionary
-from antlion.test_utils.bt.bt_test_utils import reset_bluetooth
-from antlion.test_utils.bt.bt_test_utils import setup_multiple_devices_for_bt_test
-from antlion.test_utils.bt.bt_test_utils import take_btsnoop_logs
-from antlion.test_utils.bt.ble_lib import BleLib
-from antlion.test_utils.bt.bta_lib import BtaLib
-from antlion.test_utils.bt.config_lib import ConfigLib
-from antlion.test_utils.bt.gattc_lib import GattClientLib
-from antlion.test_utils.bt.gatts_lib import GattServerLib
-from antlion.test_utils.bt.rfcomm_lib import RfcommLib
-from antlion.test_utils.bt.shell_commands_lib import ShellCommands
-
-
-class BluetoothBaseTest(BaseTestClass):
-    DEFAULT_TIMEOUT = 10
-    start_time = 0
-    timer_list = []
-
-    # Used in test cases to facilitate faster log lookup
-    # and reduce ambiguity in logging.
-    @staticmethod
-    def bt_test_wrap(fn):
-        def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "{}:{}:{}".format(self.__class__.__name__, fn.__name__,
-                                        time.time())
-            log_string = "[Test ID] {}".format(test_id)
-            self.log.info(log_string)
-            try:
-                for ad in self.android_devices:
-                    ad.droid.logI("Started " + log_string)
-                result = fn(self, *args, **kwargs)
-                for ad in self.android_devices:
-                    ad.droid.logI("Finished " + log_string)
-                if result is not True and "bt_auto_rerun" in self.user_params:
-                    self.teardown_test()
-                    log_string = "[Rerun Test ID] {}. 1st run failed.".format(
-                        test_id)
-                    self.log.info(log_string)
-                    self.setup_test()
-                    for ad in self.android_devices:
-                        ad.droid.logI("Rerun Started " + log_string)
-                    result = fn(self, *args, **kwargs)
-                    if result is True:
-                        self.log.info("Rerun passed.")
-                    elif result is False:
-                        self.log.info("Rerun failed.")
-                    else:
-                        # In the event that we have a non-bool or null
-                        # retval, we want to clearly distinguish this in the
-                        # logs from an explicit failure, though the test will
-                        # still be considered a failure for reporting purposes.
-                        self.log.info("Rerun indeterminate.")
-                        result = False
-                return result
-            except TestSignal:
-                raise
-            except Exception as e:
-                self.log.error(traceback.format_exc())
-                self.log.error(str(e))
-                raise
-
-        return _safe_wrap_test_case
-
-    def setup_class(self):
-        super().setup_class()
-        for ad in self.android_devices:
-            self._setup_bt_libs(ad)
-        if 'preferred_device_order' in self.user_params:
-            preferred_device_order = self.user_params['preferred_device_order']
-            for i, ad in enumerate(self.android_devices):
-                if ad.serial in preferred_device_order:
-                    index = preferred_device_order.index(ad.serial)
-                    self.android_devices[i], self.android_devices[index] = \
-                        self.android_devices[index], self.android_devices[i]
-
-        if "reboot_between_test_class" in self.user_params:
-            threads = []
-            for a in self.android_devices:
-                thread = threading.Thread(
-                    target=self._reboot_device, args=([a]))
-                threads.append(thread)
-                thread.start()
-            for t in threads:
-                t.join()
-        if not setup_multiple_devices_for_bt_test(self.android_devices):
-            return False
-        self.device_selector = get_device_selector_dictionary(
-            self.android_devices)
-        if "bluetooth_proto_path" in self.user_params:
-            for ad in self.android_devices:
-                ad.metrics_path = os.path.join(ad.log_path, "BluetoothMetrics")
-                os.makedirs(ad.metrics_path, exist_ok=True)
-        return True
-
-    def setup_test(self):
-        self.timer_list = []
-        for a in self.android_devices:
-            a.ed.clear_all_events()
-            a.droid.setScreenTimeout(500)
-            a.droid.wakeUpNow()
-        return True
-
-    def on_fail(self, test_name, begin_time):
-        self.log.debug(
-            "Test {} failed. Gathering bugreport and btsnoop logs".format(
-                test_name))
-        take_btsnoop_logs(self.android_devices, self, test_name)
-        self._take_bug_report(test_name, begin_time)
-        for _ in range(5):
-            if reset_bluetooth(self.android_devices):
-                break
-            else:
-                self.log.error("Failed to reset Bluetooth... retrying.")
-        return
-
-    def _get_time_in_milliseconds(self):
-        return int(round(time.time() * 1000))
-
-    def start_timer(self):
-        self.start_time = self._get_time_in_milliseconds()
-
-    def end_timer(self):
-        total_time = self._get_time_in_milliseconds() - self.start_time
-        self.timer_list.append(total_time)
-        self.start_time = 0
-        return total_time
-
-    def log_stats(self):
-        if self.timer_list:
-            self.log.info("Overall list {}".format(self.timer_list))
-            self.log.info("Average of list {}".format(
-                sum(self.timer_list) / float(len(self.timer_list))))
-            self.log.info("Maximum of list {}".format(max(self.timer_list)))
-            self.log.info("Minimum of list {}".format(min(self.timer_list)))
-            self.log.info("Total items in list {}".format(
-                len(self.timer_list)))
-        self.timer_list = []
-
-    def _setup_bt_libs(self, android_device):
-        # Bluetooth Low Energy library.
-        setattr(android_device, "ble", BleLib(
-            log=self.log, dut=android_device))
-        # Bluetooth Adapter library.
-        setattr(android_device, "bta", BtaLib(
-            log=self.log, dut=android_device))
-        # Bluetooth stack config library.
-        setattr(android_device, "config",
-                ConfigLib(log=self.log, dut=android_device))
-        # GATT Client library.
-        setattr(android_device, "gattc",
-                GattClientLib(log=self.log, dut=android_device))
-        # GATT Server library.
-        setattr(android_device, "gatts",
-                GattServerLib(log=self.log, dut=android_device))
-        # RFCOMM library.
-        setattr(android_device, "rfcomm",
-                RfcommLib(log=self.log, dut=android_device))
-        # Shell command library
-        setattr(android_device, "shell",
-                ShellCommands(log=self.log, dut=android_device))
-        # Setup Android Device feature list
-        setattr(android_device, "features",
-                android_device.adb.shell("pm list features").split("\n"))
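The bt_test_wrap decorator deleted above tags each run with a unique test ID and, when bt_auto_rerun is configured, reruns a failed case once. Below is a stripped-down standalone sketch of that retry-once decorator, without the SL4A logging or setup/teardown calls; retry_once_on_failure and auto_rerun are illustrative names.

import functools
import time


def retry_once_on_failure(fn):
    """Rerun the wrapped test once if the first attempt does not return True."""

    @functools.wraps(fn)
    def wrapper(self, *args, **kwargs):
        test_id = "{}:{}:{}".format(type(self).__name__, fn.__name__, time.time())
        print("[Test ID] {}".format(test_id))
        result = fn(self, *args, **kwargs)
        if result is not True and getattr(self, "auto_rerun", False):
            print("[Rerun Test ID] {}. 1st run failed.".format(test_id))
            result = fn(self, *args, **kwargs)
            # Any non-True result of the rerun still counts as a failure.
            result = result is True
        return result

    return wrapper


class ExampleTest:
    auto_rerun = True
    attempts = 0

    @retry_once_on_failure
    def test_flaky(self):
        self.attempts += 1
        return self.attempts > 1  # fails on the first attempt only


if __name__ == '__main__':
    assert ExampleTest().test_flaky() is True
    print('retry decorator works')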
diff --git a/src/antlion/test_utils/bt/BluetoothCarHfpBaseTest.py b/src/antlion/test_utils/bt/BluetoothCarHfpBaseTest.py
deleted file mode 100644
index 11e4c1b..0000000
--- a/src/antlion/test_utils/bt/BluetoothCarHfpBaseTest.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is base class for tests that exercises different GATT procedures between two connected devices.
-Setup/Teardown methods take care of establishing connection, and doing GATT DB initialization/discovery.
-"""
-
-import os
-import time
-
-from antlion.keys import Config
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.bt_test_utils import pair_pri_to_sec
-
-
-class BluetoothCarHfpBaseTest(BluetoothBaseTest):
-    DEFAULT_TIMEOUT = 15
-    ag_phone_number = ""
-    re_phone_number = ""
-
-    def __init__(self, controllers):
-        BluetoothBaseTest.__init__(self, controllers)
-        # HF : HandsFree (CarKit role)
-        self.hf = self.android_devices[0]
-        self.hf.log.info("Role set to HF (HandsFree Carkit role).")
-        # AG : Audio Gateway (Phone role)
-        self.ag = self.android_devices[1]
-        self.ag.log.info("Role set to AG (Audio Gateway Phone role).")
-        # RE : Remote Device (Phone being talked to role)
-        if len(self.android_devices) > 2:
-            self.re = self.android_devices[2]
-            self.re.log.info("Role set to RE (Remote device).")
-        else:
-            self.re = None
-        if len(self.android_devices) > 3:
-            self.re2 = self.android_devices[3]
-            self.re2.log.info("Role set to RE2 (Remote device 2).")
-        else:
-            self.re2 = None
-
-    def setup_class(self):
-        super(BluetoothCarHfpBaseTest, self).setup_class()
-        if not "sim_conf_file" in self.user_params.keys():
-            self.log.error("Missing mandatory user config \"sim_conf_file\"!")
-            return False
-        sim_conf_file = self.user_params["sim_conf_file"][0]
-        if not os.path.isfile(sim_conf_file):
-            sim_conf_file = os.path.join(
-                self.user_params[Config.key_config_path.value], sim_conf_file)
-            if not os.path.isfile(sim_conf_file):
-                self.log.error("Unable to load user config " + sim_conf_file +
-                               " from test config file.")
-                return False
-        # Pair and connect the devices.
-        # Grace time in between stack state changes
-        time.sleep(5)
-        if not pair_pri_to_sec(
-                self.hf, self.ag, attempts=4, auto_confirm=False):
-            self.log.error("Failed to pair")
-            return False
-        return True
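setup_class above resolves sim_conf_file either as an absolute path or, failing that, relative to the test config directory. A minimal sketch of that resolution order follows, assuming nothing beyond the standard library; resolve_config_file is an illustrative helper, not an antlion API.

import os


def resolve_config_file(path, config_dir):
    """Return path if it exists, otherwise try it relative to config_dir."""
    if os.path.isfile(path):
        return path
    candidate = os.path.join(config_dir, path)
    if os.path.isfile(candidate):
        return candidate
    # The deleted setup_class logs an error and fails setup in this case.
    return None


if __name__ == '__main__':
    print(resolve_config_file('sim_card.json', '/tmp/antlion-config'))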
diff --git a/src/antlion/test_utils/bt/BtEnum.py b/src/antlion/test_utils/bt/BtEnum.py
deleted file mode 100644
index 380095b..0000000
--- a/src/antlion/test_utils/bt/BtEnum.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from enum import IntEnum
-
-
-class BluetoothScanModeType(IntEnum):
-    STATE_OFF = -1
-    SCAN_MODE_NONE = 0
-    SCAN_MODE_CONNECTABLE = 1
-    SCAN_MODE_CONNECTABLE_DISCOVERABLE = 3
-
-
-class BluetoothAdapterState(IntEnum):
-    STATE_OFF = 10
-    STATE_TURNING_ON = 11
-    STATE_ON = 12
-    STATE_TURNING_OFF = 13
-    STATE_BLE_TURNING_ON = 14
-    STATE_BLE_ON = 15
-    STATE_BLE_TURNING_OFF = 16
-
-
-class BluetoothProfile(IntEnum):
-    # Should be kept in sync with BluetoothProfile.java
-    HEADSET = 1
-    A2DP = 2
-    HEALTH = 3
-    INPUT_DEVICE = 4
-    PAN = 5
-    PBAP_SERVER = 6
-    GATT = 7
-    GATT_SERVER = 8
-    MAP = 9
-    SAP = 10
-    A2DP_SINK = 11
-    AVRCP_CONTROLLER = 12
-    HEADSET_CLIENT = 16
-    PBAP_CLIENT = 17
-    MAP_MCE = 18
-
-
-class RfcommUuid(Enum):
-    DEFAULT_UUID = "457807c0-4897-11df-9879-0800200c9a66"
-    BASE_UUID = "00000000-0000-1000-8000-00805F9B34FB"
-    SDP = "00000001-0000-1000-8000-00805F9B34FB"
-    UDP = "00000002-0000-1000-8000-00805F9B34FB"
-    RFCOMM = "00000003-0000-1000-8000-00805F9B34FB"
-    TCP = "00000004-0000-1000-8000-00805F9B34FB"
-    TCS_BIN = "00000005-0000-1000-8000-00805F9B34FB"
-    TCS_AT = "00000006-0000-1000-8000-00805F9B34FB"
-    ATT = "00000007-0000-1000-8000-00805F9B34FB"
-    OBEX = "00000008-0000-1000-8000-00805F9B34FB"
-    IP = "00000009-0000-1000-8000-00805F9B34FB"
-    FTP = "0000000A-0000-1000-8000-00805F9B34FB"
-    HTTP = "0000000C-0000-1000-8000-00805F9B34FB"
-    WSP = "0000000E-0000-1000-8000-00805F9B34FB"
-    BNEP = "0000000F-0000-1000-8000-00805F9B34FB"
-    UPNP = "00000010-0000-1000-8000-00805F9B34FB"
-    HIDP = "00000011-0000-1000-8000-00805F9B34FB"
-    HARDCOPY_CONTROL_CHANNEL = "00000012-0000-1000-8000-00805F9B34FB"
-    HARDCOPY_DATA_CHANNEL = "00000014-0000-1000-8000-00805F9B34FB"
-    HARDCOPY_NOTIFICATION = "00000016-0000-1000-8000-00805F9B34FB"
-    AVCTP = "00000017-0000-1000-8000-00805F9B34FB"
-    AVDTP = "00000019-0000-1000-8000-00805F9B34FB"
-    CMTP = "0000001B-0000-1000-8000-00805F9B34FB"
-    MCAP_CONTROL_CHANNEL = "0000001E-0000-1000-8000-00805F9B34FB"
-    MCAP_DATA_CHANNEL = "0000001F-0000-1000-8000-00805F9B34FB"
-    L2CAP = "00000100-0000-1000-8000-00805F9B34FB"
-
-
-class BluetoothProfileState(Enum):
-    # Should be kept in sync with BluetoothProfile#STATE_* constants.
-    STATE_DISCONNECTED = 0
-    STATE_CONNECTING = 1
-    STATE_CONNECTED = 2
-    STATE_DISCONNECTING = 3
-
-
-class BluetoothAccessLevel(Enum):
-    # Access Levels from BluetoothDevice.
-    ACCESS_ALLOWED = 1
-    ACCESS_DENIED = 2
-
-
-class BluetoothPriorityLevel(Enum):
-    # Priority levels as defined in BluetoothProfile.java.
-    PRIORITY_AUTO_CONNECT = 1000
-    PRIORITY_ON = 100
-    PRIORITY_OFF = 0
-    PRIORITY_UNDEFINED = -1
-
-class BluetoothA2dpCodecType(Enum):
-    SBC = 0
-    AAC = 1
-    APTX = 2
-    APTX_HD = 3
-    LDAC = 4
-    MAX = 5
diff --git a/src/antlion/test_utils/bt/BtFunhausBaseTest.py b/src/antlion/test_utils/bt/BtFunhausBaseTest.py
deleted file mode 100644
index 6975685..0000000
--- a/src/antlion/test_utils/bt/BtFunhausBaseTest.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test script to automate the Bluetooth Audio Funhaus.
-"""
-from antlion.keys import Config
-from antlion.test_utils.bt.BtMetricsBaseTest import BtMetricsBaseTest
-from antlion.test_utils.bt.bt_test_utils import bluetooth_enabled_check
-from antlion.utils import bypass_setup_wizard
-from antlion.utils import exe_cmd
-from antlion.utils import sync_device_time
-import time
-import os
-
-BT_CONF_PATH = "/data/misc/bluedroid/bt_config.conf"
-
-
-class BtFunhausBaseTest(BtMetricsBaseTest):
-    """
-    Base class for Bluetooth A2DP audio tests, this class is in charge of
-    pushing link key to Android device so that it could be paired with remote
-    A2DP device, pushing music to Android device, playing audio, monitoring
-    audio play, and stop playing audio
-    """
-    music_file_to_play = ""
-    device_fails_to_connect_list = []
-
-    def __init__(self, controllers):
-        BtMetricsBaseTest.__init__(self, controllers)
-        self.ad = self.android_devices[0]
-        self.dongle = self.relay_devices[0]
-
-    def _pair_devices(self):
-        self.ad.droid.bluetoothStartPairingHelper(False)
-        self.dongle.enter_pairing_mode()
-
-        self.ad.droid.bluetoothBond(self.dongle.mac_address)
-
-        end_time = time.time() + 20
-        self.ad.log.info("Verifying devices are bonded")
-        while time.time() < end_time:
-            bonded_devices = self.ad.droid.bluetoothGetBondedDevices()
-
-            for d in bonded_devices:
-                if d['address'] == self.dongle.mac_address:
-                    self.ad.log.info("Successfully bonded to device.")
-                    self.log.info("Bonded devices:\n{}".format(bonded_devices))
-                    return True
-        self.ad.log.info("Failed to bond devices.")
-        return False
-
-    def setup_test(self):
-        super(BtFunhausBaseTest, self).setup_test()
-        self.dongle.setup()
-        tries = 5
-        # Pairing is not the focus of this test, so retry it up to 5 times.
-        while tries > 0:
-            if self._pair_devices():
-                return True
-            else:
-                tries -= 1
-        return False
-
-    def teardown_test(self):
-        super(BtFunhausBaseTest, self).teardown_test()
-        self.dongle.clean_up()
-        return True
-
-    def on_fail(self, test_name, begin_time):
-        self.dongle.clean_up()
-        self._collect_bluetooth_manager_dumpsys_logs(self.android_devices)
-        super(BtFunhausBaseTest, self).on_fail(test_name, begin_time)
-
-    def setup_class(self):
-        if not super(BtFunhausBaseTest, self).setup_class():
-            return False
-        for ad in self.android_devices:
-            sync_device_time(ad)
-            # Disable Bluetooth HCI Snoop Logs for audio tests
-            ad.adb.shell("setprop persist.bluetooth.btsnoopenable false")
-            if not bypass_setup_wizard(ad):
-                self.log.debug(
-                    "Failed to bypass setup wizard, continuing test.")
-        # Add music to the Android device
-        return self._add_music_to_android_device(ad)
-
-    def _add_music_to_android_device(self, ad):
-        """
-        Add music to Android device as specified by the test config
-        :param ad: Android device
-        :return: True on success, False on failure
-        """
-        self.log.info("Pushing music to the Android device.")
-        music_path_str = "bt_music"
-        android_music_path = "/sdcard/Music/"
-        if music_path_str not in self.user_params:
-            self.log.error("Need music for audio testcases...")
-            return False
-        music_path = self.user_params[music_path_str]
-        if type(music_path) is list:
-            self.log.info("Media ready to push as is.")
-        elif not os.path.isdir(music_path):
-            music_path = os.path.join(
-                self.user_params[Config.key_config_path.value], music_path)
-            if not os.path.isdir(music_path):
-                self.log.error(
-                    "Unable to find music directory {}.".format(music_path))
-                return False
-        if type(music_path) is list:
-            for item in music_path:
-                self.music_file_to_play = item
-                ad.adb.push("{} {}".format(item, android_music_path))
-        else:
-            for dirname, dirnames, filenames in os.walk(music_path):
-                for filename in filenames:
-                    self.music_file_to_play = filename
-                    file = os.path.join(dirname, filename)
-                    # TODO: Handle file paths with spaces
-                    ad.adb.push("{} {}".format(file, android_music_path))
-        ad.reboot()
-        return True
-
-    def _collect_bluetooth_manager_dumpsys_logs(self, ads):
-        """
-        Collect "adb shell dumpsys bluetooth_manager" logs
-        :param ads: list of active Android devices
-        :return: None
-        """
-        for ad in ads:
-            serial = ad.serial
-            out_name = "{}_{}".format(serial, "bluetooth_dumpsys.txt")
-            dumpsys_path = ''.join((ad.log_path, "/BluetoothDumpsys"))
-            os.makedirs(dumpsys_path, exist_ok=True)
-            cmd = ''.join(
-                ("adb -s ", serial, " shell dumpsys bluetooth_manager > ",
-                 dumpsys_path, "/", out_name))
-            exe_cmd(cmd)
-
-    def start_playing_music_on_all_devices(self):
-        """
-        Start playing music
-        :return: None
-        """
-        self.ad.droid.mediaPlayOpen("file:///sdcard/Music/{}".format(
-            self.music_file_to_play.split("/")[-1]))
-        self.ad.droid.mediaPlaySetLooping(True)
-        self.ad.log.info("Music is now playing.")
-
-    def monitor_music_play_util_deadline(self, end_time, sleep_interval=1):
-        """
-        Monitor music play on all devices, if a device's Bluetooth adapter is
-        OFF or if a device is not connected to any remote Bluetooth devices,
-        we add them to failure lists bluetooth_off_list and
-        device_not_connected_list respectively
-        :param end_time: The deadline in epoch floating point seconds that we
-            must stop playing
-        :param sleep_interval: How often to monitor, too small we may drain
-            too much resources on Android, too big the deadline might be passed
-            by a maximum of this amount
-        :return:
-            status: False iff all devices are off or disconnected otherwise True
-            bluetooth_off_list: List of ADs that have Bluetooth at OFF state
-            device_not_connected_list: List of ADs with no remote device
-                                        connected
-        """
-        device_not_connected_list = []
-        while time.time() < end_time:
-            if not self.ad.droid.bluetoothCheckState():
-                self.ad.log.error("Device {}'s Bluetooth state is off.".format(
-                    self.ad.serial))
-                return False
-            if not self.ad.droid.bluetoothGetConnectedDevices():
-                self.ad.log.error(
-                    "Bluetooth device not connected. Failing test.")
-            time.sleep(sleep_interval)
-        return True
-
-    def play_music_for_duration(self, duration, sleep_interval=1):
-        """
-        A convenience method for above methods. It starts run music on all
-        devices, monitors the health of music play and stops playing them when
-        time passes the duration
-        :param duration: Duration in floating point seconds
-        :param sleep_interval: How often to check the health of music play
-        :return:
-            status: False iff all devices are off or disconnected otherwise True
-            bluetooth_off_list: List of ADs that have Bluetooth at OFF state
-            device_not_connected_list: List of ADs with no remote device
-                                        connected
-        """
-        start_time = time.time()
-        end_time = start_time + duration
-        self.start_playing_music_on_all_devices()
-        status = self.monitor_music_play_util_deadline(end_time,
-                                                       sleep_interval)
-        self.ad.droid.mediaPlayStopAll()
-        return status
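monitor_music_play_util_deadline is a poll-until-deadline loop: check a health condition, sleep, and stop once the deadline passes, accepting that the deadline can be overshot by at most one sleep interval. A standalone sketch of the same loop shape follows; check_healthy stands in for the Bluetooth-state check.

import time


def monitor_until_deadline(check_healthy, end_time, sleep_interval=1.0):
    """Poll check_healthy() until end_time; return False on the first failure.

    The deadline can be overshot by at most sleep_interval seconds, the same
    trade-off the deleted helper documented.
    """
    while time.time() < end_time:
        if not check_healthy():
            return False
        time.sleep(sleep_interval)
    return True


if __name__ == '__main__':
    ok = monitor_until_deadline(lambda: True, time.time() + 2, sleep_interval=0.5)
    print('healthy for the full duration:', ok)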
diff --git a/src/antlion/test_utils/bt/BtInterferenceBaseTest.py b/src/antlion/test_utils/bt/BtInterferenceBaseTest.py
deleted file mode 100644
index 181a0da..0000000
--- a/src/antlion/test_utils/bt/BtInterferenceBaseTest.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Stream music through connected device from phone across different
-attenuations."""
-
-import json
-import math
-import time
-import logging
-import antlion.controllers.iperf_client as ipc
-import antlion.controllers.iperf_server as ipf
-import antlion.test_utils.bt.bt_test_utils as btutils
-from antlion import asserts
-from antlion.test_utils.bt.A2dpBaseTest import A2dpBaseTest
-from antlion.test_utils.bt.loggers import bluetooth_metric_logger as log
-from antlion.test_utils.wifi import wifi_performance_test_utils as wpeutils
-from antlion.test_utils.wifi import wifi_power_test_utils as wputils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.power.PowerBaseTest import ObjNew
-
-MAX_ATTENUATION = 95
-TEMP_FILE = '/sdcard/Download/tmp.log'
-IPERF_CLIENT_ERROR = 'the client has unexpectedly closed the connection'
-
-
-def setup_ap_connection(dut, network, ap, bandwidth=20):
-    """Setup AP and connect DUT to it.
-
-    Args:
-        dut: the android device to connect and run traffic
-        network: the network config for the AP to be setup
-        ap: access point object
-        bandwidth: bandwidth of the WiFi network to be setup
-    Returns:
-        brconfigs: dict for bridge interface configs
-    """
-    wutils.wifi_toggle_state(dut, True)
-    brconfigs = wputils.ap_setup(ap, network, bandwidth=bandwidth)
-    wutils.wifi_connect(dut, network, num_of_tries=3)
-    return brconfigs
-
-
-def start_iperf_client(traffic_pair_obj, duration):
-    """Setup iperf traffic for TCP downlink.
-    Args:
-        traffic_pair_obj: obj to contain info on traffic pair
-        duration: duration of iperf traffic to run
-    """
-    # Construct the iperf command based on the test params
-    iperf_cmd = 'iperf3 -c {} -i 1 -t {} -p {} -J -R > {}'.format(
-        traffic_pair_obj.server_address, duration,
-        traffic_pair_obj.iperf_server.port, TEMP_FILE)
-    # Start IPERF client
-    traffic_pair_obj.dut.adb.shell_nb(iperf_cmd)
-
-
-def unpack_custom_file(file):
-    """Unpack the json file to .
-
-    Args:
-        file: custom json file.
-    """
-    with open(file, 'r') as f:
-        params = json.load(f)
-    return params
-
-
-def get_iperf_results(iperf_server_obj):
-    """Get the iperf results and process.
-
-    Args:
-        iperf_server_obj: the IperfServer object
-    Returns:
-         throughput: the average throughput during tests.
-    """
-    # Get IPERF results and add this to the plot title
-    iperf_file = iperf_server_obj.log_files[-1]
-    try:
-        iperf_result = ipf.IPerfResult(iperf_file)
-        # Compute the throughput in Mbit/s
-        if iperf_result.error == IPERF_CLIENT_ERROR:
-            rates = []
-            for item in iperf_result.result['intervals']:
-                rates.append(item['sum']['bits_per_second'] / 8 / 1024 / 1024)
-            throughput = ((math.fsum(rates) / len(rates))) * 8 * (1.024**2)
-        else:
-            throughput = (math.fsum(iperf_result.instantaneous_rates) / len(
-                iperf_result.instantaneous_rates)) * 8 * (1.024**2)
-    except (ValueError, TypeError):
-        throughput = 0
-    return throughput
-
-
-def locate_interference_pair_by_channel(wifi_int_pairs, interference_channels):
-    """Function to find which attenautor to set based on channel info
-    Args:
-        interference_channels: list of interference channels
-    Return:
-        interference_pair_indices: list of indices for interference pair
-            in wifi_int_pairs
-    """
-    interference_pair_indices = []
-    for chan in interference_channels:
-        for i in range(len(wifi_int_pairs)):
-            if wifi_int_pairs[i].channel == chan:
-                interference_pair_indices.append(i)
-    return interference_pair_indices
-
-
-def inject_static_wifi_interference(wifi_int_pairs, interference_level,
-                                    channels):
-    """Function to inject wifi interference to bt link and read rssi.
-
-    Interference of IPERF traffic is always running, by setting attenuation,
-    the gate is opened to release the interference to the setup.
-    Args:
-        interference_level: the signal strength of wifi interference, use
-            attenuation level to represent this
-        channels: wifi channels where interference will
-            be injected, list
-    """
-    all_pair = range(len(wifi_int_pairs))
-    interference_pair_indices = locate_interference_pair_by_channel(
-        wifi_int_pairs, channels)
-    inactive_interference_pairs_indices = [
-        item for item in all_pair if item not in interference_pair_indices
-    ]
-    logging.info('WiFi interference at {} and inactive channels at {}'.format(
-        interference_pair_indices, inactive_interference_pairs_indices))
-    for i in interference_pair_indices:
-        wifi_int_pairs[i].attenuator.set_atten(interference_level)
-        logging.info('Set attenuation {} dB on attenuator {}'.format(
-            wifi_int_pairs[i].attenuator.get_atten(), i + 1))
-    for i in inactive_interference_pairs_indices:
-        wifi_int_pairs[i].attenuator.set_atten(MAX_ATTENUATION)
-        logging.info('Set attenuation {} dB on attenuator {}'.format(
-            wifi_int_pairs[i].attenuator.get_atten(), i + 1))
-
-
-class BtInterferenceBaseTest(A2dpBaseTest):
-    def __init__(self, configs):
-        super().__init__(configs)
-        self.bt_logger = log.BluetoothMetricLogger.for_test_case()
-        self.start_time = time.time()
-        req_params = [
-            'attenuation_vector', 'wifi_networks', 'codecs', 'custom_files',
-            'audio_params'
-        ]
-        self.unpack_userparams(req_params)
-        for file in self.custom_files:
-            if 'static_interference' in file:
-                self.static_wifi_interference = unpack_custom_file(file)
-            elif 'dynamic_interference' in file:
-                self.dynamic_wifi_interference = unpack_custom_file(file)
-
-    def setup_class(self):
-        super().setup_class()
-        # Build object to store all necessary information for each pair of wifi
-        # interference setup: phone, ap, network, channel, iperf server port/ip
-        # object and bridge interface configs
-        if len(self.android_devices) < 5 or len(self.attenuators) < 4:
-            self.log.error('Need a 4 channel attenuator and 5 android phones'
-                           ', please update the config file')
-        self.wifi_int_pairs = []
-        for i in range(len(self.attenuators) - 1):
-            tmp_dict = {
-                'dut': self.android_devices[i + 1],
-                'ap': self.access_points[i],
-                'network': self.wifi_networks[i],
-                'channel': self.wifi_networks[i]['channel'],
-                'iperf_server': self.iperf_servers[i],
-                'attenuator': self.attenuators[i + 1],
-                'ether_int': self.packet_senders[i],
-                'iperf_client':
-                ipc.IPerfClientOverAdb(self.android_devices[i + 1])
-            }
-            tmp_obj = ObjNew(**tmp_dict)
-            self.wifi_int_pairs.append(tmp_obj)
-        # Setup connection between WiFi APs and phones and get DHCP address
-        # for the interface
-        for obj in self.wifi_int_pairs:
-            brconfigs = setup_ap_connection(obj.dut, obj.network, obj.ap)
-            iperf_server_address = wputils.wait_for_dhcp(
-                obj.ether_int.interface)
-            setattr(obj, 'server_address', iperf_server_address)
-            setattr(obj, 'brconfigs', brconfigs)
-            obj.attenuator.set_atten(MAX_ATTENUATION)
-        # Enable BQR on master and slave Android device
-        btutils.enable_bqr(self.dut)
-        btutils.enable_bqr(self.bt_device_controller)
-
-    def teardown_class(self):
-        super().teardown_class()
-        for obj in self.wifi_int_pairs:
-            obj.ap.bridge.teardown(obj.brconfigs)
-            self.log.info('Stop IPERF server at port {}'.format(
-                obj.iperf_server.port))
-            obj.iperf_server.stop()
-            self.log.info('Stop IPERF process on {}'.format(obj.dut.serial))
-            #only for glinux machine
-            #            wputils.bring_down_interface(obj.ether_int.interface)
-            obj.attenuator.set_atten(MAX_ATTENUATION)
-            obj.ap.close()
-
-    def teardown_test(self):
-        super().teardown_test()
-        for obj in self.wifi_int_pairs:
-            obj.attenuator.set_atten(MAX_ATTENUATION)
-
-    def play_and_record_audio(self, duration, queue):
-        """Play and record audio for a set duration.
-
-        Args:
-            duration: duration in seconds for music playing
-            queue: multiprocess queue used to pass back the captured audio
-                file path
-        """
-
-        self.log.info('Play and record audio for {} seconds'.format(duration))
-        self.media.play()
-        self.audio_device.start()
-        time.sleep(duration)
-        audio_captured = self.audio_device.stop()
-        self.media.stop()
-        self.log.info('Audio play and record stopped')
-        asserts.assert_true(audio_captured, 'Audio not recorded')
-        queue.put(audio_captured)
-
-    def locate_interference_pair_by_channel(self, interference_channels):
-        """Function to find which attenautor to set based on channel info
-        Args:
-            interference_channels: list of interference channels
-        Return:
-            interference_pair_indices: list of indices for interference pair
-                in self.wifi_int_pairs
-        """
-        interference_pair_indices = []
-        for chan in interference_channels:
-            for i in range(len(self.wifi_int_pairs)):
-                if self.wifi_int_pairs[i].channel == chan:
-                    interference_pair_indices.append(i)
-        return interference_pair_indices
-
-    def get_interference_rssi(self):
-        """Function to read wifi interference RSSI level."""
-
-        bssids = []
-        self.interference_rssi = []
-        wutils.wifi_toggle_state(self.android_devices[0], True)
-        for item in self.wifi_int_pairs:
-            ssid = item.network['SSID']
-            bssid = item.ap.get_bssid_from_ssid(ssid, '2g')
-            bssids.append(bssid)
-            interference_rssi_dict = {
-                "ssid": ssid,
-                "bssid": bssid,
-                "chan": item.channel,
-                "rssi": 0
-            }
-            self.interference_rssi.append(interference_rssi_dict)
-        scanned_rssi = wpeutils.get_scan_rssi(self.android_devices[0],
-                                              bssids,
-                                              num_measurements=2)
-        for item in self.interference_rssi:
-            item['rssi'] = scanned_rssi[item['bssid']]['mean']
-            self.log.info('Interference RSSI at channel {} is {} dBm'.format(
-                item['chan'], item['rssi']))
-        wutils.wifi_toggle_state(self.android_devices[0], False)
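locate_interference_pair_by_channel, defined above both at module level and as a method, is a channel-to-index lookup over the configured WiFi interference pairs; the indices are then used to pick which attenuators to open. Below is a standalone sketch using a namedtuple in place of the ObjNew pair objects; the names are illustrative only.

from collections import namedtuple

# Stand-in for the ObjNew pair objects built in setup_class.
InterferencePair = namedtuple('InterferencePair', ['channel', 'attenuator_port'])


def locate_pairs_by_channel(wifi_int_pairs, interference_channels):
    """Return indices of pairs whose channel is in interference_channels."""
    indices = []
    for chan in interference_channels:
        for index, pair in enumerate(wifi_int_pairs):
            if pair.channel == chan:
                indices.append(index)
    return indices


if __name__ == '__main__':
    pairs = [InterferencePair(1, 2), InterferencePair(6, 3), InterferencePair(11, 4)]
    print('matched pair indices:', locate_pairs_by_channel(pairs, [6, 11]))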
diff --git a/src/antlion/test_utils/bt/BtMetricsBaseTest.py b/src/antlion/test_utils/bt/BtMetricsBaseTest.py
deleted file mode 100644
index 8abd13d..0000000
--- a/src/antlion/test_utils/bt/BtMetricsBaseTest.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-
-
-class BtMetricsBaseTest(BluetoothBaseTest):
-    """
-    Base class for tests that require dumping and parsing Bluetooth Metrics
-    """
-
-    def __init__(self, controllers):
-        BluetoothBaseTest.__init__(self, controllers)
-        self.ad = self.android_devices[0]
diff --git a/src/antlion/test_utils/bt/BtSarBaseTest.py b/src/antlion/test_utils/bt/BtSarBaseTest.py
deleted file mode 100644
index eb06837..0000000
--- a/src/antlion/test_utils/bt/BtSarBaseTest.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import time
-import logging
-import pandas as pd
-
-from antlion import asserts
-from antlion.libs.proc import job
-from antlion.base_test import BaseTestClass
-
-from antlion.test_utils.bt.bt_power_test_utils import MediaControl
-from antlion.test_utils.bt.ble_performance_test_utils import run_ble_throughput_and_read_rssi
-from antlion.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as bt_factory
-
-import antlion.test_utils.bt.bt_test_utils as bt_utils
-import antlion.test_utils.wifi.wifi_performance_test_utils as wifi_utils
-
-PHONE_MUSIC_FILE_DIRECTORY = '/sdcard/Music'
-
-FORCE_SAR_ADB_COMMAND = ('am broadcast -n '
-                         'com.google.android.apps.scone/.coex.TestReceiver -a '
-                         'com.google.android.apps.scone.coex.SIMULATE_STATE ')
-
-SLEEP_DURATION = 2
-
-DEFAULT_DURATION = 5
-DEFAULT_MAX_ERROR_THRESHOLD = 2
-DEFAULT_AGG_MAX_ERROR_THRESHOLD = 2
-FIXED_ATTENUATION = 36
-
-
-class BtSarBaseTest(BaseTestClass):
-    """ Base class for all BT SAR Test classes.
-
-        This class implements functions common to BT SAR test classes.
-    """
-    BACKUP_BT_SAR_TABLE_NAME = 'backup_bt_sar_table.csv'
-
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
-        self.power_file_paths = [
-            '/vendor/etc/bluetooth_power_limits.csv',
-            '/data/vendor/radio/bluetooth_power_limits.csv'
-        ]
-        self.sar_file_name = os.path.basename(self.power_file_paths[0])
-        self.power_column = 'BluetoothPower'
-        self.REG_DOMAIN_DICT = {
-            ('us', 'ca', 'in'): 'US',
-            ('uk', 'fr', 'es', 'de', 'it', 'ie', 'sg', 'au', 'tw'): 'EU',
-            ('jp', ): 'JP'
-        }
-
-    def setup_class(self):
-        """Initializes common test hardware and parameters.
-
-        This function initializes hardware and compiles parameters that are
-        common to all tests in this class and derived classes.
-        """
-        super().setup_class()
-
-        self.test_params = self.user_params.get('bt_sar_test_params', {})
-        if not self.test_params:
-            self.log.warning(
-                'bt_sar_test_params was not found in the config file.')
-
-        self.user_params.update(self.test_params)
-        req_params = ['bt_devices', 'calibration_params', 'custom_files']
-
-        self.unpack_userparams(
-            req_params,
-            country_code='us',
-            duration=DEFAULT_DURATION,
-            sort_order=None,
-            max_error_threshold=DEFAULT_MAX_ERROR_THRESHOLD,
-            agg_error_threshold=DEFAULT_AGG_MAX_ERROR_THRESHOLD,
-            tpc_threshold=[2, 8],
-            sar_margin={
-                'BDR': 0,
-                'EDR': 0,
-                'BLE': 0
-            })
-
-        self.attenuator = self.attenuators[0]
-        self.dut = self.android_devices[0]
-
-        for key in self.REG_DOMAIN_DICT.keys():
-            if self.country_code.lower() in key:
-                self.reg_domain = self.REG_DOMAIN_DICT[key]
-
-        self.sar_version_2 = False
-
-        if 'Error' not in self.dut.adb.shell('bluetooth_sar_test -r'):
-            #Flag for SAR version 2
-            self.sar_version_2 = True
-            self.power_column = 'BluetoothEDRPower'
-            self.power_file_paths[0] = os.path.join(
-                os.path.dirname(self.power_file_paths[0]),
-                'bluetooth_power_limits_{}.csv'.format(self.reg_domain))
-            self.sar_file_name = os.path.basename(self.power_file_paths[0])
-
-        if self.sar_version_2:
-            custom_file_suffix = 'version2'
-        else:
-            custom_file_suffix = 'version1'
-
-        for file in self.custom_files:
-            if 'custom_sar_table_{}.csv'.format(custom_file_suffix) in file:
-                self.custom_sar_path = file
-                break
-        else:
-            raise RuntimeError('Custom SAR file is missing')
-
-        self.sar_file_path = self.power_file_paths[0]
-        self.atten_min = 0
-        self.atten_max = int(self.attenuator.get_max_atten())
-
-        # Get music file and push it to the phone and initialize Media controller
-        music_files = self.user_params.get('music_files', [])
-        if music_files:
-            music_src = music_files[0]
-            music_dest = PHONE_MUSIC_FILE_DIRECTORY
-            success = self.dut.push_system_file(music_src, music_dest)
-            if success:
-                self.music_file = os.path.join(PHONE_MUSIC_FILE_DIRECTORY,
-                                               os.path.basename(music_src))
-            # Initialize media_control class
-            self.media = MediaControl(self.dut, self.music_file)
-
-        #Initializing BT device controller
-        if self.bt_devices:
-            attr, idx = self.bt_devices.split(':')
-            self.bt_device_controller = getattr(self, attr)[int(idx)]
-            self.bt_device = bt_factory().generate(self.bt_device_controller)
-        else:
-            self.log.error('No BT devices config is provided!')
-
-        bt_utils.enable_bqr(self.android_devices)
-
-        self.log_path = os.path.join(logging.log_path, 'results')
-        os.makedirs(self.log_path, exist_ok=True)
-
-        # Reading BT SAR table from the phone
-        self.bt_sar_df = self.read_sar_table(self.dut)
-
-    def setup_test(self):
-        super().setup_test()
-
-        # Starting BT on the master
-        self.dut.droid.bluetoothFactoryReset()
-        bt_utils.enable_bluetooth(self.dut.droid, self.dut.ed)
-
-        # Starting BT on the slave
-        self.bt_device.reset()
-        self.bt_device.power_on()
-
-        # Connect master and slave
-        bt_utils.connect_phone_to_headset(self.dut, self.bt_device, 60)
-
-        # Playing music
-        self.media.play()
-
-        # Find and set PL10 level for the DUT
-        self.pl10_atten = self.set_PL10_atten_level(self.dut)
-        self.attenuator.set_atten(self.pl10_atten)
-
-    def teardown_test(self):
-        #Stopping Music
-        if hasattr(self, 'media'):
-            self.media.stop()
-
-        # Stopping BT on slave
-        self.bt_device.reset()
-        self.bt_device.power_off()
-
-        #Stopping BT on master
-        bt_utils.disable_bluetooth(self.dut.droid)
-
-        #Resetting the atten to initial levels
-        self.attenuator.set_atten(self.atten_min)
-        self.log.info('Attenuation set to {} dB'.format(self.atten_min))
-
-    def teardown_class(self):
-        super().teardown_class()
-        self.dut.droid.bluetoothFactoryReset()
-
-        # Stopping BT on slave
-        self.bt_device.reset()
-        self.bt_device.power_off()
-
-        #Stopping BT on master
-        bt_utils.disable_bluetooth(self.dut.droid)
-
-    def save_sar_plot(self, df):
-        """ Saves SAR plot to the path given.
-
-        Args:
-            df: Processed SAR table sweep results
-        """
-        self.plot.add_line(
-            df.index,
-            df['expected_tx_power'],
-            legend='expected',
-            marker='circle')
-        self.plot.add_line(
-            df.index,
-            df['measured_tx_power'],
-            legend='measured',
-            marker='circle')
-        self.plot.add_line(
-            df.index, df['delta'], legend='delta', marker='circle')
-
-        results_file_path = os.path.join(self.log_path, '{}.html'.format(
-            self.current_test_name))
-        self.plot.generate_figure()
-        wifi_utils.BokehFigure.save_figures([self.plot], results_file_path)
-
-    def sweep_power_cap(self):
-        sar_df = self.bt_sar_df
-        sar_df['BDR_power_cap'] = -128
-        sar_df['EDR_power_cap'] = -128
-        sar_df['BLE_power_cap'] = -128
-
-        if self.sar_version_2:
-            power_column_dict = {
-                'BDR': 'BluetoothBDRPower',
-                'EDR': 'BluetoothEDRPower',
-                'BLE': 'BluetoothLEPower'
-            }
-        else:
-            power_column_dict = {'EDR': self.power_column}
-
-        power_cap_error = False
-
-        for type, column_name in power_column_dict.items():
-
-            self.log.info("Performing sanity test on {}".format(type))
-            # Iterating through the BT SAR scenarios
-            for scenario in range(0, self.bt_sar_df.shape[0]):
-                # Reading BT SAR table row into dict
-                read_scenario = sar_df.loc[scenario].to_dict()
-                start_time = self.dut.adb.shell('date +%s.%m')
-                time.sleep(SLEEP_DURATION)
-
-                # Setting SAR state to the read BT SAR row
-                self.set_sar_state(self.dut, read_scenario, self.country_code)
-
-                # Reading device power cap from logcat after forcing SAR State
-                scenario_power_cap = self.get_current_power_cap(
-                    self.dut, start_time, type=type)
-                sar_df.loc[scenario, '{}_power_cap'.format(
-                    type)] = scenario_power_cap
-                self.log.info(
-                    'scenario: {}, '
-                    'sar_power: {}, power_cap:{}'.format(
-                        scenario, sar_df.loc[scenario, column_name],
-                        sar_df.loc[scenario, '{}_power_cap'.format(type)]))
-
-            if not sar_df['{}_power_cap'.format(type)].equals(
-                    sar_df[column_name]):
-                power_cap_error = True
-
-        results_file_path = os.path.join(self.log_path, '{}.csv'.format(
-            self.current_test_name))
-        sar_df.to_csv(results_file_path)
-
-        return power_cap_error
-
-    def sweep_table(self,
-                    client_ad=None,
-                    server_ad=None,
-                    client_conn_id=None,
-                    gatt_server=None,
-                    gatt_callback=None,
-                    isBLE=False):
-        """Iterates over the BT SAR table and forces signal states.
-
-        Iterates over BT SAR table and forces signal states,
-        measuring RSSI and power level for each state.
-
-        Args:
-            client_ad: the Android device performing the connection.
-            server_ad: the Android device accepting the connection.
-            client_conn_id: the client connection ID.
-            gatt_server: the gatt server
-            gatt_callback: the GATT callback object
-            isBLE: boolean variable for BLE connection
-        Returns:
-            sar_df : SAR table sweep results in pandas dataframe
-        """
-
-        sar_df = self.bt_sar_df.copy()
-        sar_df['power_cap'] = -128
-        sar_df['slave_rssi'] = -128
-        sar_df['master_rssi'] = -128
-        sar_df['ble_rssi'] = -128
-        sar_df['pwlv'] = -1
-
-        # Sorts the table
-        if self.sort_order:
-            if self.sort_order.lower() == 'ascending':
-                sar_df = sar_df.sort_values(
-                    by=[self.power_column], ascending=True)
-            else:
-                sar_df = sar_df.sort_values(
-                    by=[self.power_column], ascending=False)
-            sar_df = sar_df.reset_index(drop=True)
-
-        # Sweeping BT SAR table
-        for scenario in range(sar_df.shape[0]):
-            # Reading BT SAR Scenario from the table
-            read_scenario = sar_df.loc[scenario].to_dict()
-
-            start_time = self.dut.adb.shell('date +%s.%m')
-            time.sleep(SLEEP_DURATION)
-
-            #Setting SAR State
-            self.set_sar_state(self.dut, read_scenario, self.country_code)
-
-            if isBLE:
-                sar_df.loc[scenario, 'power_cap'] = self.get_current_power_cap(
-                    self.dut, start_time, type='BLE')
-
-                sar_df.loc[
-                    scenario, 'ble_rssi'] = run_ble_throughput_and_read_rssi(
-                        client_ad, server_ad, client_conn_id, gatt_server,
-                        gatt_callback)
-
-                self.log.info('scenario:{}, power_cap:{},  ble_rssi:{}'.format(
-                    scenario, sar_df.loc[scenario, 'power_cap'],
-                    sar_df.loc[scenario, 'ble_rssi']))
-            else:
-                sar_df.loc[scenario, 'power_cap'] = self.get_current_power_cap(
-                    self.dut, start_time)
-
-                processed_bqr_results = bt_utils.get_bt_metric(
-                    self.android_devices, self.duration)
-                sar_df.loc[scenario, 'slave_rssi'] = processed_bqr_results[
-                    'rssi'][self.bt_device_controller.serial]
-                sar_df.loc[scenario, 'master_rssi'] = processed_bqr_results[
-                    'rssi'][self.dut.serial]
-                sar_df.loc[scenario, 'pwlv'] = processed_bqr_results['pwlv'][
-                    self.dut.serial]
-                self.log.info(
-                    'scenario:{}, power_cap:{},  s_rssi:{}, m_rssi:{}, m_pwlv:{}'
-                    .format(scenario, sar_df.loc[scenario, 'power_cap'],
-                            sar_df.loc[scenario, 'slave_rssi'],
-                            sar_df.loc[scenario, 'master_rssi'],
-                            sar_df.loc[scenario, 'pwlv']))
-
-        self.log.info('BT SAR Table swept')
-
-        return sar_df
-
-    def process_table(self, sar_df):
-        """Processes the results of sweep_table and computes BT TX power.
-
-        Processes the results of sweep_table and computes BT TX power
-        after factoring in the path loss and FTM offsets.
-
-        Args:
-             sar_df: BT SAR table after the sweep
-
-        Returns:
-            sar_df: processed BT SAR table
-        """
-
-        sar_df['pathloss'] = self.calibration_params['pathloss']
-
-        if hasattr(self, 'pl10_atten'):
-            sar_df['atten'] = self.pl10_atten
-        else:
-            sar_df['atten'] = FIXED_ATTENUATION
-
-        # BT SAR Backoff for each scenario
-        if self.sar_version_2:
-            #Reads OTP values from the phone
-            self.otp = bt_utils.read_otp(self.dut)
-
-            #OTP backoff
-            edr_otp = min(0, float(self.otp['EDR']['10']))
-            bdr_otp = min(0, float(self.otp['BR']['10']))
-            ble_otp = min(0, float(self.otp['BLE']['10']))
-
-            # EDR TX Power for PL10
-            edr_tx_power_pl10 = self.calibration_params['target_power']['EDR']['10'] - edr_otp
-
-            # BDR TX Power for PL10
-            bdr_tx_power_pl10 = self.calibration_params['target_power']['BDR']['10'] - bdr_otp
-
-            # RSSI being measured is BDR
-            offset = bdr_tx_power_pl10 - edr_tx_power_pl10
-
-            # BDR-EDR offset
-            sar_df['offset'] = offset
-
-            # Max TX power permissible
-            sar_df['max_power'] = self.calibration_params['max_power']
-
-            # Adding a target power column
-            if 'ble_rssi' in sar_df.columns:
-                sar_df[
-                    'target_power'] = self.calibration_params['target_power']['BLE']['10'] - ble_otp
-            else:
-                sar_df['target_power'] = sar_df['pwlv'].astype(str).map(
-                    self.calibration_params['target_power']['EDR']) - edr_otp
-
-            #Translates power_cap values to expected TX power level
-            sar_df['cap_tx_power'] = sar_df['power_cap'] / 4.0
-
-            sar_df['expected_tx_power'] = sar_df[[
-                'cap_tx_power', 'target_power', 'max_power'
-            ]].min(axis=1)
-
-            if hasattr(self, 'pl10_atten'):
-                sar_df[
-                    'measured_tx_power'] = sar_df['slave_rssi'] + sar_df['pathloss'] + self.pl10_atten - offset
-            else:
-                sar_df[
-                    'measured_tx_power'] = sar_df['ble_rssi'] + sar_df['pathloss'] + FIXED_ATTENUATION
-
-        else:
-
-            # Adding a target power column
-            sar_df['target_power'] = sar_df['pwlv'].astype(str).map(
-                self.calibration_params['target_power']['EDR']['10'])
-
-            # Adding an FTM power column
-            sar_df['ftm_power'] = sar_df['pwlv'].astype(str).map(
-                self.calibration_params['ftm_power']['EDR'])
-            sar_df[
-                'backoff'] = sar_df['target_power'] - sar_df['power_cap'] / 4.0
-
-            sar_df[
-                'expected_tx_power'] = sar_df['ftm_power'] - sar_df['backoff']
-            sar_df[
-                'measured_tx_power'] = sar_df['slave_rssi'] + sar_df['pathloss'] + self.pl10_atten
-
-        sar_df[
-            'delta'] = sar_df['expected_tx_power'] - sar_df['measured_tx_power']
-
-        self.log.info('Sweep results processed')
-
-        results_file_path = os.path.join(self.log_path, self.current_test_name)
-        sar_df.to_csv('{}.csv'.format(results_file_path))
-        self.save_sar_plot(sar_df)
-
-        return sar_df
-
-    def process_results(self, sar_df, type='EDR'):
-        """Determines the test results of the sweep.
-
-        Parses the processed table with computed BT TX power values
-        to determine pass or fail.
-
-        Args:
-            sar_df: processed BT SAR table
-            type: BT transport type ('BDR', 'EDR' or 'BLE')
-        """
-        if self.sar_version_2:
-            breach_error_result = (
-                sar_df['expected_tx_power'] + self.sar_margin[type] >
-                sar_df['measured_tx_power']).all()
-            if not breach_error_result:
-                asserts.fail('Measured TX power exceeds expected')
-
-        else:
-            # checks for errors at particular points in the sweep
-            max_error_result = abs(
-                sar_df['delta']) > self.max_error_threshold[type]
-            if max_error_result.any():
-                asserts.fail('Maximum Error Threshold Exceeded')
-
-            # checks for error accumulation across the sweep
-            if sar_df['delta'].sum() > self.agg_error_threshold[type]:
-                asserts.fail(
-                    'Aggregate Error Threshold Exceeded. Error: {} Threshold: {}'.
-                    format(sar_df['delta'].sum(), self.agg_error_threshold[type]))
-
-        asserts.explicit_pass('Measured and expected power values are in line')
-
-    def set_sar_state(self, ad, signal_dict, country_code='us'):
-        """Sets the SAR state corresponding to the BT SAR signal.
-
-        The SAR state is forced using an adb command that takes
-        device signals as input.
-
-        Args:
-            ad: android_device object.
-            signal_dict: dict of BT SAR signals read from the SAR file.
-        Returns:
-            enforced_state: dict of device signals.
-        """
-        signal_dict = {k: max(int(v), 0) for (k, v) in signal_dict.items()}
-        signal_dict["Wifi"] = signal_dict['WIFI5Ghz']
-        signal_dict['WIFI2Ghz'] = 0 if signal_dict['WIFI5Ghz'] else 1
-
-        device_state_dict = {
-            ('Earpiece', 'earpiece'): signal_dict['Head'],
-            ('Wifi', 'wifi'): signal_dict['WIFI5Ghz'],
-            ('Wifi 2.4G', 'wifi_24g'): signal_dict['WIFI2Ghz'],
-            ('Voice', 'voice'): 0,
-            ('Wifi AP', 'wifi_ap'): signal_dict['HotspotVoice'],
-            ('Bluetooth', 'bluetooth'): 1,
-            ('Bluetooth media', 'bt_media'): signal_dict['BTMedia'],
-            ('Radio', 'radio_power'): signal_dict['Cell'],
-            ('Motion', 'motion'): signal_dict['IMU'],
-            ('Bluetooth connected', 'bt_connected'): 1
-        }
-
-        if 'BTHotspot' in signal_dict.keys():
-            device_state_dict[('Bluetooth tethering',
-                               'bt_tethering')] = signal_dict['BTHotspot']
-
-        enforced_state = {}
-        sar_state_command = FORCE_SAR_ADB_COMMAND
-        for key in device_state_dict:
-            enforced_state[key[0]] = device_state_dict[key]
-            sar_state_command = '{} --ei {} {}'.format(
-                sar_state_command, key[1], device_state_dict[key])
-        if self.sar_version_2:
-            sar_state_command = '{} --es country_iso "{}"'.format(
-                sar_state_command, country_code.lower())
-
-        #Forcing the SAR state
-        adb_output = ad.adb.shell(sar_state_command)
-
-        # Checking if command was successfully enforced
-        if 'result=0' in adb_output:
-            self.log.info('Requested BT SAR state successfully enforced.')
-            return enforced_state
-        else:
-            self.log.error("Couldn't force BT SAR state.")
-
-    def parse_bt_logs(self, ad, begin_time, regex=''):
-        """Returns bt software stats by parsing logcat since begin_time.
-
-        The quantity to be fetched is dictated by the regex provided.
-
-        Args:
-             ad: android_device object.
-             begin_time: time stamp to start the logcat parsing.
-             regex: regex for fetching the required BT software stats.
-
-        Returns:
-             stat: the desired BT stat.
-        """
-        # Waiting for logcat to update
-        time.sleep(SLEEP_DURATION)
-        bt_adb_log = ad.adb.logcat('-b all -t %s' % begin_time)
-        for line in bt_adb_log.splitlines():
-            if re.findall(regex, line):
-                stat = re.findall(regex, line)[0]
-                return stat
-
-    def set_country_code(self, ad, cc):
-        """Sets the SAR regulatory domain as per given country code
-
-        The SAR regulatory domain is forced using an adb command that takes
-        country code as input.
-
-        Args:
-            ad: android_device object.
-            cc: country code
-        """
-
-        ad.adb.shell("{} --es country_iso {}".format(FORCE_SAR_ADB_COMMAND,
-                                                     cc))
-        self.log.info("Country Code set to {}".format(cc))
-
-    def get_country_code(self, ad, begin_time):
-        """Returns the enforced regulatory domain since begin_time
-
-        Returns the enforced regulatory domain since begin_time by parsing
-        logcat. This should be called after a call that sets the country code.
-
-        Args:
-            ad: android_device object
-            begin_time: time stamp to start the logcat parsing
-
-        Returns:
-            the enforced regulatory domain read from logcat
-        """
-
-        reg_domain_regex = "updateRegulatoryDomain:\s+(\S+)"
-        reg_domain = self.parse_bt_logs(ad, begin_time, reg_domain_regex)
-        return reg_domain
-
-    def get_current_power_cap(self, ad, begin_time, type='EDR'):
-        """ Returns the enforced software EDR power cap since begin_time.
-
-        Returns the enforced EDR power cap since begin_time by parsing logcat.
-        Function should follow a function call that forces a SAR state
-
-        Args:
-            ad: android_device obj.
-            begin_time: time stamp to start.
-
-        Returns:
-            read enforced power cap
-        """
-        power_cap_regex_dict = {
-            'BDR': [
-                'Bluetooth powers: BR:\s+(\d+), EDR:\s+\d+',
-                'Bluetooth Tx Power Cap\s+(\d+)'
-            ],
-            'EDR': [
-                'Bluetooth powers: BR:\s+\d+, EDR:\s+(\d+)',
-                'Bluetooth Tx Power Cap\s+(\d+)'
-            ],
-            'BLE': [
-                'Bluetooth powers: BR:\s+\d+, EDR:\s+\d+, BLE:\s+(\d+)',
-                'Bluetooth Tx Power Cap\s+(\d+)'
-            ]
-        }
-
-        power_cap_regex_list = power_cap_regex_dict[type]
-
-        for power_cap_regex in power_cap_regex_list:
-            power_cap = self.parse_bt_logs(ad, begin_time, power_cap_regex)
-            if power_cap:
-                return int(power_cap)
-
-        raise ValueError('Failed to get TX power cap')
-
-    def get_current_device_state(self, ad, begin_time):
-        """ Returns the device state of the android dut since begin_time.
-
-        Returns the device state of the android dut by parsing logcat since
-        begin_time. Function should follow a function call that forces
-        a SAR state.
-
-        Args:
-            ad: android_device obj.
-            begin_time: time stamp to start.
-
-        Returns:
-            device_state: device state of the android device.
-        """
-
-        device_state_regex = 'updateDeviceState: DeviceState: ([\s*\S+\s]+)'
-        time.sleep(SLEEP_DURATION)
-        device_state = self.parse_bt_logs(ad, begin_time, device_state_regex)
-        if device_state:
-            return device_state
-
-        raise ValueError("Couldn't fetch device state")
-
-    def read_sar_table(self, ad, output_path=''):
-        """Extracts the BT SAR table from the phone.
-
-        Extracts the BT SAR table from the phone into the android device
-        log path directory.
-
-        Args:
-            ad: android_device object.
-            output_path: path to custom sar table
-        Returns:
-            df : BT SAR table (as pandas DataFrame).
-        """
-        if not output_path:
-            output_path = os.path.join(ad.device_log_path, self.sar_file_name)
-            ad.adb.pull('{} {}'.format(self.sar_file_path, output_path))
-
-        df = pd.read_csv(output_path)
-        self.log.info('BT SAR table read from the phone')
-        return df
-
-    def push_table(self, ad, src_path, dest_path=''):
-        """Pushes a BT SAR table to the phone.
-
-        Pushes a BT SAR table to the android device and reboots the device.
-        A copy of the pushed table is also kept in the device log path.
-
-        Args:
-            ad: android_device object.
-            src_path: path to the BT SAR table to push.
-            dest_path: optional destination path on the device; defaults to
-                self.sar_file_path.
-        """
-        #Copying the to-be-pushed file for logging
-        if os.path.dirname(src_path) != ad.device_log_path:
-            job.run('cp {} {}'.format(src_path, ad.device_log_path))
-
-        #Pushing the file provided in the config
-        if dest_path:
-            ad.push_system_file(src_path, dest_path)
-        else:
-            ad.push_system_file(src_path, self.sar_file_path)
-        self.log.info('BT SAR table pushed')
-        ad.reboot()
-
-        self.bt_sar_df = self.read_sar_table(self.dut, src_path)
-
-    def set_PL10_atten_level(self, ad):
-        """Finds the attenuation level at which the phone is at PL10
-
-        Finds PL10 attenuation level by sweeping the attenuation range.
-        If PL10 is not reached during the sweep, a warning is logged and
-        nothing is returned.
-
-        Args:
-            ad: android_device object
-        Returns:
-            atten: attenuation level at which the phone is at PL10
-        """
-        BT_SAR_ATTEN_STEP = 3
-
-        for atten in range(self.atten_min, self.atten_max, BT_SAR_ATTEN_STEP):
-            self.attenuator.set_atten(atten)
-            # Sleep required for BQR to reflect the change in parameters
-            time.sleep(SLEEP_DURATION)
-            metrics = bt_utils.get_bt_metric(ad)
-            if metrics['pwlv'][ad.serial] == 10:
-                self.log.info(
-                    'PL10 located at {}'.format(atten + BT_SAR_ATTEN_STEP))
-                return atten + BT_SAR_ATTEN_STEP
-
-        self.log.warn(
-            "PL10 couldn't be located in the given attenuation range")
diff --git a/src/antlion/test_utils/bt/GattConnectedBaseTest.py b/src/antlion/test_utils/bt/GattConnectedBaseTest.py
deleted file mode 100644
index c131530..0000000
--- a/src/antlion/test_utils/bt/GattConnectedBaseTest.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is the base class for tests that exercise different GATT procedures between two connected devices.
-Setup/teardown methods take care of establishing the connection and performing GATT DB initialization/discovery.
-"""
-
-from queue import Empty
-
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_mtu_size
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import orchestrate_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_characteristics
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_descriptors
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-
-
-class GattConnectedBaseTest(BluetoothBaseTest):
-
-    TEST_SERVICE_UUID = "3846D7A0-69C8-11E4-BA00-0002A5D5C51B"
-    READABLE_CHAR_UUID = "21c0a0bf-ad51-4a2d-8124-b74003e4e8c8"
-    READABLE_DESC_UUID = "aa7edd5a-4d1d-4f0e-883a-d145616a1630"
-    WRITABLE_CHAR_UUID = "aa7edd5a-4d1d-4f0e-883a-d145616a1630"
-    WRITABLE_DESC_UUID = "76d5ed92-ca81-4edb-bb6b-9f019665fb32"
-    NOTIFIABLE_CHAR_UUID = "b2c83efa-34ca-11e6-ac61-9e71128cae77"
-
-    def setup_class(self):
-        super().setup_class()
-        self.cen_ad = self.android_devices[0]
-        self.per_ad = self.android_devices[1]
-
-    def setup_test(self):
-        super(GattConnectedBaseTest, self).setup_test()
-
-        self.gatt_server_callback, self.gatt_server = self._setup_multiple_services(
-        )
-        if not self.gatt_server_callback or not self.gatt_server:
-            raise AssertionError('Service setup failed')
-
-        self.bluetooth_gatt, self.gatt_callback, self.adv_callback = (
-            orchestrate_gatt_connection(self.cen_ad, self.per_ad))
-        self.per_ad.droid.bleStopBleAdvertising(self.adv_callback)
-
-        self.mtu = gatt_mtu_size['min']
-
-        if self.cen_ad.droid.gattClientDiscoverServices(self.bluetooth_gatt):
-            event = self._client_wait(gatt_event['gatt_serv_disc'])
-            self.discovered_services_index = event['data']['ServicesIndex']
-        services_count = self.cen_ad.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        self.test_service_index = None
-        for i in range(services_count):
-            disc_service_uuid = (
-                self.cen_ad.droid.gattClientGetDiscoveredServiceUuid(
-                    self.discovered_services_index, i).upper())
-            if disc_service_uuid == self.TEST_SERVICE_UUID:
-                self.test_service_index = i
-                break
-
-        if self.test_service_index is None:
-            self.log.error("Service not found")
-            return False
-
-        connected_device_list = self.per_ad.droid.gattServerGetConnectedDevices(
-            self.gatt_server)
-        if len(connected_device_list) == 0:
-            self.log.info("No devices connected from peripheral.")
-            return False
-
-        return True
-
-    def teardown_test(self):
-        self.per_ad.droid.gattServerClearServices(self.gatt_server)
-        self.per_ad.droid.gattServerClose(self.gatt_server)
-
-        del self.gatt_server_callback
-        del self.gatt_server
-
-        self._orchestrate_gatt_disconnection(self.bluetooth_gatt,
-                                             self.gatt_callback)
-
-        return super(GattConnectedBaseTest, self).teardown_test()
-
-    def _server_wait(self, gatt_event):
-        return self._timed_pop(gatt_event, self.per_ad,
-                               self.gatt_server_callback)
-
-    def _client_wait(self, gatt_event):
-        return self._timed_pop(gatt_event, self.cen_ad, self.gatt_callback)
-
-    def _timed_pop(self, gatt_event, droid, gatt_callback):
-        expected_event = gatt_event["evt"].format(gatt_callback)
-        try:
-            return droid.ed.pop_event(expected_event, bt_default_timeout)
-        except Empty:
-            raise AssertionError(gatt_event["err"].format(expected_event))
-
-    def _setup_characteristics_and_descriptors(self, droid):
-        characteristic_input = [
-            {
-                'uuid': self.WRITABLE_CHAR_UUID,
-                'property': gatt_characteristic['property_write'] |
-                gatt_characteristic['property_write_no_response'],
-                'permission': gatt_characteristic['permission_write']
-            },
-            {
-                'uuid': self.READABLE_CHAR_UUID,
-                'property': gatt_characteristic['property_read'],
-                'permission': gatt_characteristic['permission_read']
-            },
-            {
-                'uuid': self.NOTIFIABLE_CHAR_UUID,
-                'property': gatt_characteristic['property_notify'] |
-                gatt_characteristic['property_indicate'],
-                'permission': gatt_characteristic['permission_read']
-            },
-        ]
-        descriptor_input = [{
-            'uuid': self.WRITABLE_DESC_UUID,
-            'property': gatt_descriptor['permission_read'] |
-            gatt_characteristic['permission_write'],
-        }, {
-            'uuid': self.READABLE_DESC_UUID,
-            'property': gatt_descriptor['permission_read'] |
-            gatt_descriptor['permission_write'],
-        }, {
-            'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            'property': gatt_descriptor['permission_read'] |
-            gatt_descriptor['permission_write'],
-        }]
-        characteristic_list = setup_gatt_characteristics(droid,
-                                                         characteristic_input)
-        self.notifiable_char_index = characteristic_list[2]
-        descriptor_list = setup_gatt_descriptors(droid, descriptor_input)
-        return characteristic_list, descriptor_list
-
-    def _orchestrate_gatt_disconnection(self, bluetooth_gatt, gatt_callback):
-        self.log.info("Disconnecting from peripheral device.")
-        try:
-            disconnect_gatt_connection(self.cen_ad, bluetooth_gatt,
-                                       gatt_callback)
-        except GattTestUtilsError as err:
-            self.log.error(err)
-            return False
-        self.cen_ad.droid.gattClientClose(bluetooth_gatt)
-        return True
-
-    def _find_service_added_event(self, gatt_server_callback, uuid):
-        expected_event = gatt_cb_strings['serv_added'].format(
-            gatt_server_callback)
-        try:
-            event = self.per_ad.ed.pop_event(expected_event,
-                                             bt_default_timeout)
-        except Empty:
-            self.log.error(gatt_cb_err['serv_added_err'].format(
-                expected_event))
-            return False
-        if event['data']['serviceUuid'].lower() != uuid.lower():
-            self.log.error("Uuid mismatch. Found: {}, Expected {}.".format(
-                event['data']['serviceUuid'], uuid))
-            return False
-        return True
-
-    def _setup_multiple_services(self):
-        gatt_server_callback = (
-            self.per_ad.droid.gattServerCreateGattServerCallback())
-        gatt_server = self.per_ad.droid.gattServerOpenGattServer(
-            gatt_server_callback)
-        characteristic_list, descriptor_list = (
-            self._setup_characteristics_and_descriptors(self.per_ad.droid))
-        self.per_ad.droid.gattServerCharacteristicAddDescriptor(
-            characteristic_list[0], descriptor_list[0])
-        self.per_ad.droid.gattServerCharacteristicAddDescriptor(
-            characteristic_list[1], descriptor_list[1])
-        self.per_ad.droid.gattServerCharacteristicAddDescriptor(
-            characteristic_list[2], descriptor_list[2])
-        gatt_service3 = self.per_ad.droid.gattServerCreateService(
-            self.TEST_SERVICE_UUID, gatt_service_types['primary'])
-        for characteristic in characteristic_list:
-            self.per_ad.droid.gattServerAddCharacteristicToService(
-                gatt_service3, characteristic)
-        self.per_ad.droid.gattServerAddService(gatt_server, gatt_service3)
-        result = self._find_service_added_event(gatt_server_callback,
-                                                self.TEST_SERVICE_UUID)
-        if not result:
-            return False, False
-        return gatt_server_callback, gatt_server
-
-    def assertEqual(self, first, second, msg=None):
-        if not first == second:
-            if not msg:
-                raise AssertionError('%r != %r' % (first, second))
-            else:
-                raise AssertionError(msg + ' %r != %r' % (first, second))
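The removed base class waits on GATT events by formatting a callback-specific event name and popping it from the device's event dispatcher, failing with the matching error string on timeout. A small hedged sketch of that pattern outside the class (the helper name and timeout are illustrative; the event-name convention follows the GattConnect{}onMtuChanged style used by these utilities):

    from queue import Empty

    def wait_for_mtu_changed(event_dispatcher, gatt_callback, timeout=10):
        # Event names are keyed by the GATT client callback ID.
        expected_event = "GattConnect{}onMtuChanged".format(gatt_callback)
        try:
            return event_dispatcher.pop_event(expected_event, timeout)
        except Empty:
            raise AssertionError(
                "MTU Changed event not found. Expected {}".format(expected_event))
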
diff --git a/src/antlion/test_utils/bt/GattEnum.py b/src/antlion/test_utils/bt/GattEnum.py
deleted file mode 100644
index 4b4c171..0000000
--- a/src/antlion/test_utils/bt/GattEnum.py
+++ /dev/null
@@ -1,304 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from enum import IntEnum
-
-
-class GattCbErr(Enum):
-    CHAR_WRITE_REQ_ERR = "Characteristic Write Request event not found. Expected {}"
-    CHAR_WRITE_ERR = "Characteristic Write event not found. Expected {}"
-    DESC_WRITE_REQ_ERR = "Descriptor Write Request event not found. Expected {}"
-    DESC_WRITE_ERR = "Descriptor Write event not found. Expected {}"
-    CHAR_READ_ERR = "Characteristic Read event not found. Expected {}"
-    CHAR_READ_REQ_ERR = "Characteristic Read Request not found. Expected {}"
-    DESC_READ_ERR = "Descriptor Read event not found. Expected {}"
-    DESC_READ_REQ_ERR = "Descriptor Read Request event not found. Expected {}"
-    RD_REMOTE_RSSI_ERR = "Read Remote RSSI event not found. Expected {}"
-    GATT_SERV_DISC_ERR = "GATT Services Discovered event not found. Expected {}"
-    SERV_ADDED_ERR = "Service Added event not found. Expected {}"
-    MTU_CHANGED_ERR = "MTU Changed event not found. Expected {}"
-    MTU_SERV_CHANGED_ERR = "MTU Server Changed event not found. Expected {}"
-    GATT_CONN_CHANGE_ERR = "GATT Connection Changed event not found. Expected {}"
-    CHAR_CHANGE_ERR = "GATT Characteristic Changed event not found. Expected {}"
-    PHY_READ_ERR = "Phy Read event not found. Expected {}"
-    PHY_UPDATE_ERR = "Phy Update event not found. Expected {}"
-    EXEC_WRITE_ERR = "GATT Execute Write event not found. Expected {}"
-
-
-class GattCbStrings(Enum):
-    CHAR_WRITE_REQ = "GattServer{}onCharacteristicWriteRequest"
-    EXEC_WRITE = "GattServer{}onExecuteWrite"
-    CHAR_WRITE = "GattConnect{}onCharacteristicWrite"
-    DESC_WRITE_REQ = "GattServer{}onDescriptorWriteRequest"
-    DESC_WRITE = "GattConnect{}onDescriptorWrite"
-    CHAR_READ = "GattConnect{}onCharacteristicRead"
-    CHAR_READ_REQ = "GattServer{}onCharacteristicReadRequest"
-    DESC_READ = "GattConnect{}onDescriptorRead"
-    DESC_READ_REQ = "GattServer{}onDescriptorReadRequest"
-    RD_REMOTE_RSSI = "GattConnect{}onReadRemoteRssi"
-    GATT_SERV_DISC = "GattConnect{}onServicesDiscovered"
-    SERV_ADDED = "GattServer{}onServiceAdded"
-    MTU_CHANGED = "GattConnect{}onMtuChanged"
-    MTU_SERV_CHANGED = "GattServer{}onMtuChanged"
-    GATT_CONN_CHANGE = "GattConnect{}onConnectionStateChange"
-    CHAR_CHANGE = "GattConnect{}onCharacteristicChanged"
-    PHY_READ = "GattConnect{}onPhyRead"
-    PHY_UPDATE = "GattConnect{}onPhyUpdate"
-    SERV_PHY_READ = "GattServer{}onPhyRead"
-    SERV_PHY_UPDATE = "GattServer{}onPhyUpdate"
-
-
-class GattEvent(Enum):
-    CHAR_WRITE_REQ = {
-        "evt": GattCbStrings.CHAR_WRITE_REQ.value,
-        "err": GattCbErr.CHAR_WRITE_REQ_ERR.value
-    }
-    EXEC_WRITE = {
-        "evt": GattCbStrings.EXEC_WRITE.value,
-        "err": GattCbErr.EXEC_WRITE_ERR.value
-    }
-    CHAR_WRITE = {
-        "evt": GattCbStrings.CHAR_WRITE.value,
-        "err": GattCbErr.CHAR_WRITE_ERR.value
-    }
-    DESC_WRITE_REQ = {
-        "evt": GattCbStrings.DESC_WRITE_REQ.value,
-        "err": GattCbErr.DESC_WRITE_REQ_ERR.value
-    }
-    DESC_WRITE = {
-        "evt": GattCbStrings.DESC_WRITE.value,
-        "err": GattCbErr.DESC_WRITE_ERR.value
-    }
-    CHAR_READ = {
-        "evt": GattCbStrings.CHAR_READ.value,
-        "err": GattCbErr.CHAR_READ_ERR.value
-    }
-    CHAR_READ_REQ = {
-        "evt": GattCbStrings.CHAR_READ_REQ.value,
-        "err": GattCbErr.CHAR_READ_REQ_ERR.value
-    }
-    DESC_READ = {
-        "evt": GattCbStrings.DESC_READ.value,
-        "err": GattCbErr.DESC_READ_ERR.value
-    }
-    DESC_READ_REQ = {
-        "evt": GattCbStrings.DESC_READ_REQ.value,
-        "err": GattCbErr.DESC_READ_REQ_ERR.value
-    }
-    RD_REMOTE_RSSI = {
-        "evt": GattCbStrings.RD_REMOTE_RSSI.value,
-        "err": GattCbErr.RD_REMOTE_RSSI_ERR.value
-    }
-    GATT_SERV_DISC = {
-        "evt": GattCbStrings.GATT_SERV_DISC.value,
-        "err": GattCbErr.GATT_SERV_DISC_ERR.value
-    }
-    SERV_ADDED = {
-        "evt": GattCbStrings.SERV_ADDED.value,
-        "err": GattCbErr.SERV_ADDED_ERR.value
-    }
-    MTU_CHANGED = {
-        "evt": GattCbStrings.MTU_CHANGED.value,
-        "err": GattCbErr.MTU_CHANGED_ERR.value
-    }
-    GATT_CONN_CHANGE = {
-        "evt": GattCbStrings.GATT_CONN_CHANGE.value,
-        "err": GattCbErr.GATT_CONN_CHANGE_ERR.value
-    }
-    CHAR_CHANGE = {
-        "evt": GattCbStrings.CHAR_CHANGE.value,
-        "err": GattCbErr.CHAR_CHANGE_ERR.value
-    }
-    PHY_READ = {
-        "evt": GattCbStrings.PHY_READ.value,
-        "err": GattCbErr.PHY_READ_ERR.value
-    }
-    PHY_UPDATE = {
-        "evt": GattCbStrings.PHY_UPDATE.value,
-        "err": GattCbErr.PHY_UPDATE_ERR.value
-    }
-    SERV_PHY_READ = {
-        "evt": GattCbStrings.SERV_PHY_READ.value,
-        "err": GattCbErr.PHY_READ_ERR.value
-    }
-    SERV_PHY_UPDATE = {
-        "evt": GattCbStrings.SERV_PHY_UPDATE.value,
-        "err": GattCbErr.PHY_UPDATE_ERR.value
-    }
-
-
-class GattConnectionState(IntEnum):
-    STATE_DISCONNECTED = 0
-    STATE_CONNECTING = 1
-    STATE_CONNECTED = 2
-    STATE_DISCONNECTING = 3
-
-
-class GattCharacteristic(Enum):
-    PROPERTY_BROADCAST = 0x01
-    PROPERTY_READ = 0x02
-    PROPERTY_WRITE_NO_RESPONSE = 0x04
-    PROPERTY_WRITE = 0x08
-    PROPERTY_NOTIFY = 0x10
-    PROPERTY_INDICATE = 0x20
-    PROPERTY_SIGNED_WRITE = 0x40
-    PROPERTY_EXTENDED_PROPS = 0x80
-    PERMISSION_READ = 0x01
-    PERMISSION_READ_ENCRYPTED = 0x02
-    PERMISSION_READ_ENCRYPTED_MITM = 0x04
-    PERMISSION_WRITE = 0x10
-    PERMISSION_WRITE_ENCRYPTED = 0x20
-    PERMISSION_WRITE_ENCRYPTED_MITM = 0x40
-    PERMISSION_WRITE_SIGNED = 0x80
-    PERMISSION_WRITE_SIGNED_MITM = 0x100
-    WRITE_TYPE_DEFAULT = 0x02
-    WRITE_TYPE_NO_RESPONSE = 0x01
-    WRITE_TYPE_SIGNED = 0x04
-    FORMAT_UINT8 = 0x11
-    FORMAT_UINT16 = 0x12
-    FORMAT_UINT32 = 0x14
-    FORMAT_SINT8 = 0x21
-    FORMAT_SINT16 = 0x22
-    FORMAT_SINT32 = 0x24
-    FORMAT_SFLOAT = 0x32
-    FORMAT_FLOAT = 0x34
-
-
-class GattDescriptor(Enum):
-    ENABLE_NOTIFICATION_VALUE = [0x01, 0x00]
-    ENABLE_INDICATION_VALUE = [0x02, 0x00]
-    DISABLE_NOTIFICATION_VALUE = [0x00, 0x00]
-    PERMISSION_READ = 0x01
-    PERMISSION_READ_ENCRYPTED = 0x02
-    PERMISSION_READ_ENCRYPTED_MITM = 0x04
-    PERMISSION_WRITE = 0x10
-    PERMISSION_WRITE_ENCRYPTED = 0x20
-    PERMISSION_WRITE_ENCRYPTED_MITM = 0x40
-    PERMISSION_WRITE_SIGNED = 0x80
-    PERMISSION_WRITE_SIGNED_MITM = 0x100
-
-
-class GattCharDesc(Enum):
-    GATT_CHARAC_EXT_PROPER_UUID = '00002900-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_USER_DESC_UUID = '00002901-0000-1000-8000-00805f9b34fb'
-    GATT_CLIENT_CHARAC_CFG_UUID = '00002902-0000-1000-8000-00805f9b34fb'
-    GATT_SERVER_CHARAC_CFG_UUID = '00002903-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_FMT_UUID = '00002904-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_AGREG_FMT_UUID = '00002905-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_VALID_RANGE_UUID = '00002906-0000-1000-8000-00805f9b34fb'
-    GATT_EXTERNAL_REPORT_REFERENCE = '00002907-0000-1000-8000-00805f9b34fb'
-    GATT_REPORT_REFERENCE = '00002908-0000-1000-8000-00805f9b34fb'
-
-
-class GattCharTypes(Enum):
-    GATT_CHARAC_DEVICE_NAME = '00002a00-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_APPEARANCE = '00002a01-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_PERIPHERAL_PRIV_FLAG = '00002a02-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_RECONNECTION_ADDRESS = '00002a03-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_PERIPHERAL_PREF_CONN = '00002a04-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SERVICE_CHANGED = '00002a05-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SYSTEM_ID = '00002a23-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_MODEL_NUMBER_STRING = '00002a24-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SERIAL_NUMBER_STRING = '00002a25-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_FIRMWARE_REVISION_STRING = '00002a26-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_HARDWARE_REVISION_STRING = '00002a27-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SOFTWARE_REVISION_STRING = '00002a28-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_MANUFACTURER_NAME_STRING = '00002a29-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_PNP_ID = '00002a50-0000-1000-8000-00805f9b34fb'
-
-
-class GattCharacteristicAttrLength(Enum):
-    MTU_ATTR_1 = 1
-    MTU_ATTR_2 = 3
-    MTU_ATTR_3 = 15
-
-
-class CharacteristicValueFormat(Enum):
-    STRING = 0x1
-    BYTE = 0x2
-    FORMAT_SINT8 = 0x21
-    FORMAT_UINT8 = 0x11
-    FORMAT_SINT16 = 0x22
-    FORMAT_UINT16 = 0x12
-    FORMAT_SINT32 = 0x24
-    FORMAT_UINT32 = 0x14
-
-
-class GattService(IntEnum):
-    SERVICE_TYPE_PRIMARY = 0
-    SERVICE_TYPE_SECONDARY = 1
-
-
-class GattConnectionPriority(IntEnum):
-    CONNECTION_PRIORITY_BALANCED = 0
-    CONNECTION_PRIORITY_HIGH = 1
-    CONNECTION_PRIORITY_LOW_POWER = 2
-
-
-class MtuSize(IntEnum):
-    MIN = 23
-    MAX = 217
-
-
-class GattCharacteristicAttrLength(IntEnum):
-    MTU_ATTR_1 = 1
-    MTU_ATTR_2 = 3
-    MTU_ATTR_3 = 15
-
-
-class BluetoothGatt(Enum):
-    GATT_SUCCESS = 0
-    GATT_FAILURE = 0x101
-
-
-class GattTransport(IntEnum):
-    TRANSPORT_AUTO = 0x00
-    TRANSPORT_BREDR = 0x01
-    TRANSPORT_LE = 0x02
-
-
-class GattPhy(IntEnum):
-    PHY_LE_1M = 1
-    PHY_LE_2M = 2
-    PHY_LE_CODED = 3
-
-
-class GattPhyMask(IntEnum):
-    PHY_LE_1M_MASK = 1
-    PHY_LE_2M_MASK = 2
-    PHY_LE_CODED_MASK = 4
-
-
-# TODO Decide whether to continue with Enums or move to dictionaries
-GattServerResponses = {
-    "GATT_SUCCESS": 0x0,
-    "GATT_FAILURE": 0x1,
-    "GATT_READ_NOT_PERMITTED": 0x2,
-    "GATT_WRITE_NOT_PERMITTED": 0x3,
-    "GATT_INVALID_PDU": 0x4,
-    "GATT_INSUFFICIENT_AUTHENTICATION": 0x5,
-    "GATT_REQUEST_NOT_SUPPORTED": 0x6,
-    "GATT_INVALID_OFFSET": 0x7,
-    "GATT_INSUFFICIENT_AUTHORIZATION": 0x8,
-    "GATT_INVALID_ATTRIBUTE_LENGTH": 0xD,
-    "GATT_INSUFFICIENT_ENCRYPTION": 0xF,
-    "GATT_CONNECTION_CONGESTED": 0x8F,
-    "GATT_13_ERR": 0x13,
-    "GATT_12_ERR": 0x12,
-    "GATT_0C_ERR": 0x0C,
-    "GATT_16": 0x16
-}
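These enums are mostly bit flags and protocol constants; a short illustrative sketch of how they would typically be combined (variable names are assumptions, not part of the removed code):

    # Property/permission mask for a readable, notifiable characteristic.
    properties = (GattCharacteristic.PROPERTY_READ.value
                  | GattCharacteristic.PROPERTY_NOTIFY.value)
    permissions = GattCharacteristic.PERMISSION_READ.value

    # Value written to the client characteristic configuration descriptor
    # to enable notifications.
    enable_notifications = GattDescriptor.ENABLE_NOTIFICATION_VALUE.value
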
diff --git a/src/antlion/test_utils/bt/ble_lib.py b/src/antlion/test_utils/bt/ble_lib.py
deleted file mode 100644
index ee2cb1c..0000000
--- a/src/antlion/test_utils/bt/ble_lib.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-BLE libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_modes
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_tx_powers
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import small_timeout
-from antlion.test_utils.bt.bt_constants import adv_fail
-from antlion.test_utils.bt.bt_constants import adv_succ
-from antlion.test_utils.bt.bt_constants import advertising_set_on_own_address_read
-from antlion.test_utils.bt.bt_constants import advertising_set_started
-from antlion.test_utils.bt.bt_test_utils import generate_ble_advertise_objects
-
-import time
-from queue import Empty
-
-
-class BleLib():
-    def __init__(self, log, dut):
-        self.advertisement_list = []
-        self.dut = dut
-        self.log = log
-        self.default_timeout = 5
-        self.set_advertisement_list = []
-        self.generic_uuid = "0000{}-0000-1000-8000-00805f9b34fb"
-
-    def _verify_ble_adv_started(self, advertise_callback):
-        """Helper for verifying if an advertisment started or not"""
-        regex = "({}|{})".format(adv_succ.format(advertise_callback),
-                                 adv_fail.format(advertise_callback))
-        try:
-            event = self.dut.ed.pop_events(regex, 5, small_timeout)
-        except Empty:
-            self.dut.log.error("Failed to get success or failed event.")
-            return
-        if event[0]["name"] == adv_succ.format(advertise_callback):
-            self.dut.log.info("Advertisement started successfully.")
-            return True
-        else:
-            self.dut.log.info("Advertisement failed to start.")
-            return False
-
-    def start_generic_connectable_advertisement(self, line):
-        """Start a connectable LE advertisement"""
-        scan_response = None
-        if line:
-            scan_response = bool(line)
-        self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(
-            ble_advertise_settings_modes['low_latency'])
-        self.dut.droid.bleSetAdvertiseSettingsIsConnectable(True)
-        advertise_callback, advertise_data, advertise_settings = (
-            generate_ble_advertise_objects(self.dut.droid))
-        if scan_response:
-            self.dut.droid.bleStartBleAdvertisingWithScanResponse(
-                advertise_callback, advertise_data, advertise_settings,
-                advertise_data)
-        else:
-            self.dut.droid.bleStartBleAdvertising(advertise_callback,
-                                                  advertise_data,
-                                                  advertise_settings)
-        if self._verify_ble_adv_started(advertise_callback):
-            self.log.info(
-                "Tracking Callback ID: {}".format(advertise_callback))
-            self.advertisement_list.append(advertise_callback)
-            self.log.info(self.advertisement_list)
-
-    def start_connectable_advertisement_set(self, line):
-        """Start Connectable Advertisement Set"""
-        adv_callback = self.dut.droid.bleAdvSetGenCallback()
-        adv_data = {
-            "includeDeviceName": True,
-        }
-        self.dut.droid.bleAdvSetStartAdvertisingSet(
-            {
-                "connectable": True,
-                "legacyMode": False,
-                "primaryPhy": "PHY_LE_1M",
-                "secondaryPhy": "PHY_LE_1M",
-                "interval": 320
-            }, adv_data, None, None, None, 0, 0, adv_callback)
-        try:
-            evt = self.dut.ed.pop_event(
-                advertising_set_started.format(adv_callback),
-                self.default_timeout)
-        except Empty:
-            self.log.error("did not receive the set started event!")
-            return
-        set_id = evt['data']['setId']
-        evt = self.dut.ed.pop_event(
-            advertising_set_on_own_address_read.format(set_id),
-            self.default_timeout)
-        address = evt['data']['address']
-        self.log.info("Advertiser address is: {}".format(str(address)))
-        self.set_advertisement_list.append(adv_callback)
-
-    def stop_all_advertisement_set(self, line):
-        """Stop all Advertisement Sets"""
-        for adv in self.set_advertisement_list:
-            try:
-                self.dut.droid.bleAdvSetStopAdvertisingSet(adv)
-            except Exception as err:
-                self.log.error("Failed to stop advertisement: {}".format(err))
-
-    def adv_add_service_uuid_list(self, line):
-        """Add service UUID to the LE advertisement inputs:
-         [uuid1 uuid2 ... uuidN]"""
-        uuids = line.split()
-        uuid_list = []
-        for uuid in uuids:
-            if len(uuid) == 4:
-                uuid = self.generic_uuid.format(uuid)
-            uuid_list.append(uuid)
-        self.dut.droid.bleSetAdvertiseDataSetServiceUuids(uuid_list)
-
-    def adv_data_include_local_name(self, is_included):
-        """Include local name in the advertisement. inputs: [true|false]"""
-        self.dut.droid.bleSetAdvertiseDataIncludeDeviceName(bool(is_included))
-
-    def adv_data_include_tx_power_level(self, is_included):
-        """Include tx power level in the advertisement. inputs: [true|false]"""
-        self.dut.droid.bleSetAdvertiseDataIncludeTxPowerLevel(
-            bool(is_included))
-
-    def adv_data_add_manufacturer_data(self, line):
-        """Include manufacturer id and data to the advertisment:
-        [id data1 data2 ... dataN]"""
-        info = line.split()
-        manu_id = int(info[0])
-        manu_data = []
-        for data in info[1:]:
-            manu_data.append(int(data))
-        self.dut.droid.bleAddAdvertiseDataManufacturerId(manu_id, manu_data)
-
-    def start_generic_nonconnectable_advertisement(self, line):
-        """Start a nonconnectable LE advertisement"""
-        self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(
-            ble_advertise_settings_modes['low_latency'])
-        self.dut.droid.bleSetAdvertiseSettingsIsConnectable(False)
-        advertise_callback, advertise_data, advertise_settings = (
-            generate_ble_advertise_objects(self.dut.droid))
-        self.dut.droid.bleStartBleAdvertising(advertise_callback,
-                                              advertise_data,
-                                              advertise_settings)
-        if self._verify_ble_adv_started(advertise_callback):
-            self.log.info(
-                "Tracking Callback ID: {}".format(advertise_callback))
-            self.advertisement_list.append(advertise_callback)
-            self.log.info(self.advertisement_list)
-
-    def stop_all_advertisements(self, line):
-        """Stop all LE advertisements"""
-        for callback_id in self.advertisement_list:
-            self.log.info("Stopping Advertisement {}".format(callback_id))
-            self.dut.droid.bleStopBleAdvertising(callback_id)
-            time.sleep(1)
-        self.advertisement_list = []
-
-    def ble_stop_advertisement(self, callback_id):
-        """Stop an LE advertisement"""
-        if not callback_id:
-            self.log.info("Need a callback ID")
-            return
-        callback_id = int(callback_id)
-        if callback_id not in self.advertisement_list:
-            self.log.info("Callback not in list of advertisements.")
-            return
-        self.dut.droid.bleStopBleAdvertising(callback_id)
-        self.advertisement_list.remove(callback_id)
-
-    def start_max_advertisements(self, line):
-        scan_response = None
-        if line:
-            scan_response = bool(line)
-        while (True):
-            try:
-                self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(
-                    ble_advertise_settings_modes['low_latency'])
-                self.dut.droid.bleSetAdvertiseSettingsIsConnectable(True)
-                advertise_callback, advertise_data, advertise_settings = (
-                    generate_ble_advertise_objects(self.dut.droid))
-                if scan_response:
-                    self.dut.droid.bleStartBleAdvertisingWithScanResponse(
-                        advertise_callback, advertise_data, advertise_settings,
-                        advertise_data)
-                else:
-                    self.dut.droid.bleStartBleAdvertising(
-                        advertise_callback, advertise_data, advertise_settings)
-                if self._verify_ble_adv_started(advertise_callback):
-                    self.log.info(
-                        "Tracking Callback ID: {}".format(advertise_callback))
-                    self.advertisement_list.append(advertise_callback)
-                    self.log.info(self.advertisement_list)
-                else:
-                    self.log.info("Advertisements active: {}".format(
-                        len(self.advertisement_list)))
-                    return False
-            except Exception as err:
-                self.log.info("Advertisements active: {}".format(
-                    len(self.advertisement_list)))
-                return True
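A minimal usage sketch of the removed BleLib helper (the log and dut objects are assumed to come from an antlion test class; the call sequence is illustrative, not taken from the removed code):

    ble = BleLib(log=self.log, dut=self.dut)
    # Advertise the device name in a single connectable advertisement.
    ble.adv_data_include_local_name(True)
    ble.start_generic_connectable_advertisement('')
    # ... exercise the scenario under test ...
    ble.stop_all_advertisements('')
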
diff --git a/src/antlion/test_utils/bt/ble_performance_test_utils.py b/src/antlion/test_utils/bt/ble_performance_test_utils.py
deleted file mode 100644
index 29055f6..0000000
--- a/src/antlion/test_utils/bt/ble_performance_test_utils.py
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import datetime
-import statistics
-import os
-from antlion.test_utils.bt.bt_constants import advertising_set_started
-import antlion.test_utils.wifi.wifi_performance_test_utils.bokeh_figure as bokeh_figure
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_phys
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_gatt_utils import close_gatt_client
-from antlion.test_utils.bt.bt_coc_test_utils import do_multi_connection_throughput
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from queue import Empty
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import l2cap_coc_header_size
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_test_utils import generate_ble_scan_objects
-from antlion.test_utils.bt.bt_coc_test_utils import orchestrate_coc_connection
-from antlion.test_utils.bt.bt_gatt_utils import orchestrate_gatt_connection
-from concurrent.futures import ThreadPoolExecutor
-
-default_event_timeout = 10
-rssi_read_duration = 25
-
-
-def establish_ble_connection(client_ad, server_ad):
-    """Function to establish BLE connection between two BLE devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-    Returns:
-        bluetooth_gatt: GATT object
-        gatt_callback: Gatt callback object
-        adv_callback: advertisement callback object
-        gatt_server: the gatt server
-    """
-    gatt_server_cb = server_ad.droid.gattServerCreateGattServerCallback()
-    gatt_server = server_ad.droid.gattServerOpenGattServer(gatt_server_cb)
-    try:
-        bluetooth_gatt, gatt_callback, adv_callback = (
-            orchestrate_gatt_connection(client_ad, server_ad))
-    except GattTestUtilsError as err:
-        logging.error(err)
-        return False
-    return bluetooth_gatt, gatt_callback, adv_callback, gatt_server
-
-
-def read_ble_rssi(client_ad, gatt_server, gatt_callback):
-    """Function to Read BLE RSSI of the remote BLE device.
-    Args:
-        client_ad: the Android device performing the connection.
-        gatt_server: the gatt server
-        gatt_callback:the gatt connection call back object
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device
-    """
-    AVG_RSSI = []
-    end_time = time.time() + rssi_read_duration
-    logging.info("Reading BLE RSSI for {} sec".format(rssi_read_duration))
-    while time.time() < end_time:
-        expected_event = gatt_cb_strings['rd_remote_rssi'].format(
-            gatt_callback)
-        read_rssi = client_ad.droid.gattClientReadRSSI(gatt_server)
-        if read_rssi:
-            try:
-                event = client_ad.ed.pop_event(expected_event,
-                                               default_event_timeout)
-            except Empty:
-                logging.error(
-                    gatt_cb_err['rd_remote_rssi_err'].format(expected_event))
-                return False
-        rssi_value = event['data']['Rssi']
-        AVG_RSSI.append(rssi_value)
-    logging.debug("First & Last reading of RSSI :{:03d} & {:03d}".format(
-        AVG_RSSI[0], AVG_RSSI[-1]))
-    ble_rssi = statistics.mean(AVG_RSSI)
-    ble_rssi = round(ble_rssi, 2)
-
-    return ble_rssi
-
-
-def read_ble_scan_rssi(client_ad, scan_callback, rssi_read_duration=30):
-    """Function to Read BLE RSSI of the remote BLE device.
-    Args:
-        client_ad: the Android device performing the connection.
-        scan_callback: the scan callback of the server
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device
-      raw_rssi: RSSI list of remote BLE device
-    """
-    raw_rssi = []
-    timestamp = []
-    end_time = time.time() + rssi_read_duration
-    logging.info("Reading BLE Scan RSSI for {} sec".format(rssi_read_duration))
-    while time.time() < end_time:
-        expected_event = gatt_cb_strings['rd_remote_ble_rssi'].format(
-            scan_callback)
-        try:
-            event = client_ad.ed.pop_event(expected_event,
-                                           default_event_timeout)
-        except Empty:
-            logging.error(
-                gatt_cb_err['rd_remote_rssi_err'].format(expected_event))
-            return False
-        rssi_value = event['data']['Result']['rssi']
-        epoch_time = event['time']
-        d = datetime.datetime.fromtimestamp(epoch_time / 1000)
-        tstamp = d.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
-        timestamp.append(tstamp)
-        raw_rssi.append(rssi_value)
-    logging.debug("First & Last reading of RSSI :{:03d} & {:03d}".format(
-        raw_rssi[0], raw_rssi[-1]))
-    ble_rssi = statistics.mean(raw_rssi)
-    ble_rssi = round(ble_rssi, 2)
-
-    return ble_rssi, raw_rssi, timestamp
-
-
-def ble_coc_connection(client_ad, server_ad):
-    """Sets up the CoC connection between two Android devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-
-    Returns:
-        True if the connection was successful, 0 otherwise,
-        gatt_callback: GATT callback object,
-        gatt_server: the GATT server,
-        bluetooth_gatt: GATT client object,
-        client_conn_id: the client connection ID
-    """
-    # secured_conn: True if using secured connection
-    # le_connection_interval: LE Connection interval. 0 means use default.
-    # buffer_size : is the number of bytes per L2CAP data buffer
-    # le_tx_data_length: LE Data Length used by BT Controller to transmit.
-    is_secured = False
-    le_connection_interval = 30
-    buffer_size = 240
-    le_tx_data_length = buffer_size + l2cap_coc_header_size
-    gatt_server_cb = server_ad.droid.gattServerCreateGattServerCallback()
-    gatt_server = server_ad.droid.gattServerOpenGattServer(gatt_server_cb)
-
-    logging.info(
-        "orchestrate_ble_coc_connection. is_secured={}, Connection Interval={}msec, "
-        "buffer_size={}bytes".format(is_secured, le_connection_interval,
-                                     buffer_size))
-    try:
-        status, client_conn_id, server_conn_id, bluetooth_gatt, gatt_callback = orchestrate_coc_connection(
-            client_ad,
-            server_ad,
-            True,
-            is_secured,
-            le_connection_interval,
-            le_tx_data_length,
-            gatt_disconnection=False)
-    except Exception as err:
-        logging.info("Failed to esatablish COC connection".format(err))
-        return 0
-    return True, gatt_callback, gatt_server, bluetooth_gatt, client_conn_id
-
-
-def run_ble_throughput(server_ad,
-                       client_conn_id,
-                       client_ad,
-                       num_iterations=30):
-    """Function to measure Throughput from one client to one-or-many servers
-
-    Args:
-        server_ad: the Android device accepting the connection.
-        client_conn_id: the client connection ID.
-        client_ad: the Android device performing the connection.
-        num_iterations: the number of repetitions of each set of buffer
-        reads/writes.
-    Returns:
-      data_rate: Throughput in terms of bytes per second, 0 if test failed.
-    """
-    # number_buffers is the total number of data buffers to transmit per
-    # set of buffers r/w.
-    # buffer_size is the number of bytes per L2CAP data buffer.
-    number_buffers = 100
-    buffer_size = 240
-    list_server_ad = [server_ad]
-    list_client_conn_id = [client_conn_id]
-    data_rate = do_multi_connection_throughput(client_ad, list_server_ad,
-                                               list_client_conn_id,
-                                               num_iterations, number_buffers,
-                                               buffer_size)
-    if data_rate <= 0:
-        return False
-    data_rate = data_rate * 8
-    logging.info(
-        "run_ble_coc_connection_throughput: throughput=%d bites per sec",
-        data_rate)
-    return data_rate
-
-
-def run_ble_throughput_and_read_rssi(client_ad, server_ad, client_conn_id,
-                                     gatt_server, gatt_callback):
-    """Function to measure ble rssi while sendinng data from client to server
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-        client_conn_id: the client connection ID.
-        gatt_server: the gatt server
-        gatt_callback: Gatt callback object
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device.
-    """
-    executor = ThreadPoolExecutor(2)
-    ble_throughput = executor.submit(run_ble_throughput, client_ad,
-                                     client_conn_id, server_ad)
-    ble_rssi = executor.submit(read_ble_rssi, server_ad, gatt_server,
-                               gatt_callback)
-    logging.info("BLE RSSI is:{} dBm with data rate={} bites per sec ".format(
-        ble_rssi.result(), ble_throughput.result()))
-    return ble_rssi.result()
-
-
-def ble_gatt_disconnection(client_ad, bluetooth_gatt, gatt_callback):
-    """Function to disconnect GATT connection between client and server.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        bluetooth_gatt: GATT object
-        gatt_callback:the gatt connection call back object
-    Returns:
-      True if the GATT disconnection succeeded, False otherwise
-    """
-    logging.info("Disconnecting from peripheral device.")
-    try:
-        disconnect_gatt_connection(client_ad, bluetooth_gatt, gatt_callback)
-        close_gatt_client(client_ad, bluetooth_gatt)
-    except GattTestUtilsError as err:
-        logging.error(err)
-        return False
-    return True
-
-
-def plot_graph(df, plot_data, bokeh_data, secondary_y_label=None):
-    """ Plotting for generating bokeh figure
-
-    Args:
-        df: summary of results containing attenuation, DUT RSSI, remote RSSI and Tx power
-        plot_data: plot_data for adding line to existing BokehFigure
-        bokeh_data: bokeh data for generating BokehFigure
-        secondary_y_label: label for the secondary y axis, None if not available
-    """
-    plot = bokeh_figure.BokehFigure(
-        title='{}'.format(bokeh_data['current_test_name']),
-        x_label=bokeh_data['x_label'],
-        primary_y_label=bokeh_data['primary_y_label'],
-        secondary_y_label=secondary_y_label,
-        axis_label_size='16pt',
-        legend_label_size='16pt',
-        axis_tick_label_size='16pt',
-        sizing_mode='stretch_both')
-
-    for data in plot_data:
-        plot.add_line(df[plot_data[data].get('x_column')],
-                      df[plot_data[data].get('y_column')],
-                      legend=plot_data[data].get('legend'),
-                      marker=plot_data[data].get('marker'),
-                      y_axis=plot_data[data].get('y_axis'))
-
-    results_file_path = os.path.join(
-        bokeh_data['log_path'],
-        '{}.html'.format(bokeh_data['current_test_name']))
-    plot.generate_figure()
-    bokeh_figure.BokehFigure.save_figures([plot], results_file_path)
-
-
-def start_advertising_and_scanning(client_ad, server_ad, Legacymode=True):
-    """Function to start bt5 advertisement.
-
-        Args:
-            client_ad: the Android device performing the scanning.
-            server_ad: the Android device performing the BT advertising.
-            Legacymode: True for legacy advertising mode, False for BT5 advertising mode.
-        Returns:
-          adv_callback: the advertising callback
-          scan_callback: the scan_callback
-        """
-    adv_callback = server_ad.droid.bleAdvSetGenCallback()
-    adv_data = {
-        "includeDeviceName": True,
-    }
-    server_ad.droid.bleAdvSetStartAdvertisingSet(
-        {
-            "connectable": False,
-            "legacyMode": Legacymode,
-            "primaryPhy": "PHY_LE_1M",
-            "secondaryPhy": "PHY_LE_1M",
-            "interval": 320
-        }, adv_data, None, None, None, 0, 0, adv_callback)
-    server_ad.ed.pop_event(advertising_set_started.format(adv_callback),
-                           default_event_timeout)
-    logging.info("Bt5 Advertiser Started Successfully")
-    client_ad.droid.bleSetScanSettingsLegacy(False)
-    client_ad.droid.bleSetScanSettingsScanMode(
-        ble_scan_settings_modes['low_latency'])
-    client_ad.droid.bleSetScanSettingsPhy(ble_scan_settings_phys['1m'])
-
-    filter_list, scan_settings, scan_callback = generate_ble_scan_objects(
-        client_ad.droid)
-    adv_device_name = server_ad.droid.bluetoothGetLocalName()
-    client_ad.droid.bleSetScanFilterDeviceName(adv_device_name)
-    client_ad.droid.bleBuildScanFilter(filter_list)
-    client_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
-    return adv_callback, scan_callback
diff --git a/src/antlion/test_utils/bt/bt_coc_test_utils.py b/src/antlion/test_utils/bt/bt_coc_test_utils.py
deleted file mode 100644
index 6da6350..0000000
--- a/src/antlion/test_utils/bt/bt_coc_test_utils.py
+++ /dev/null
@@ -1,299 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from antlion import utils
-
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-from antlion.test_utils.bt.bt_constants import default_bluetooth_socket_timeout_ms
-from antlion.test_utils.bt.bt_constants import default_le_connection_interval_ms
-from antlion.test_utils.bt.bt_constants import default_le_data_length
-from antlion.test_utils.bt.bt_constants import gatt_phy
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.bt.bt_constants import l2cap_coc_header_size
-from antlion.test_utils.bt.bt_constants import le_connection_event_time_step_ms
-from antlion.test_utils.bt.bt_constants import le_connection_interval_time_step_ms
-from antlion.test_utils.bt.bt_constants import le_default_supervision_timeout
-from antlion.test_utils.bt.bt_test_utils import BtTestUtilsError
-from antlion.test_utils.bt.bt_test_utils import get_mac_address_of_generic_advertisement
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-
-log = logging
-
-
-class BtCoCTestUtilsError(Exception):
-    pass
-
-
-def do_multi_connection_throughput(client_ad, list_server_ad,
-                                   list_client_conn_id, num_iterations,
-                                   number_buffers, buffer_size):
-    """Throughput measurements from one client to one-or-many servers.
-
-    Args:
-        client_ad: the Android device to perform the write.
-        list_server_ad: the list of Android server devices connected to this client.
-        list_client_conn_id: list of client connection IDs
-        num_iterations: the number of test repetitions.
-        number_buffers: the total number of data buffers to transmit per test.
-        buffer_size: the number of bytes per L2CAP data buffer.
-
-    Returns:
-        Throughput in terms of bytes per second, 0 if test failed.
-    """
-
-    total_num_bytes = 0
-    start_write_time = time.perf_counter()
-    client_ad.log.info(
-        "do_multi_connection_throughput: Before write. Start Time={:f}, "
-        "num_iterations={}, number_buffers={}, buffer_size={}, "
-        "number_buffers*buffer_size={}, num_servers={}".format(
-            start_write_time, num_iterations, number_buffers, buffer_size,
-            number_buffers * buffer_size, len(list_server_ad)))
-
-    if (len(list_server_ad) != len(list_client_conn_id)):
-        client_ad.log.error("do_multi_connection_throughput: invalid "
-                            "parameters. Num of list_server_ad({}) != "
-                            "list_client_conn({})".format(
-                                len(list_server_ad), len(list_client_conn_id)))
-        return 0
-
-    try:
-        for _, client_conn_id in enumerate(list_client_conn_id):
-            client_ad.log.info("do_multi_connection_throughput: "
-                               "client_conn_id={}".format(client_conn_id))
-            # Plumb the tx data queue with the first set of data buffers.
-            client_ad.droid.bluetoothConnectionThroughputSend(
-                number_buffers, buffer_size, client_conn_id)
-    except Exception as err:
-        client_ad.log.error("Failed to write data: {}".format(err))
-        return 0
-
-    # Each Loop iteration will write and read one set of buffers.
-    for _ in range(0, (num_iterations - 1)):
-        try:
-            for _, client_conn_id in enumerate(list_client_conn_id):
-                client_ad.droid.bluetoothConnectionThroughputSend(
-                    number_buffers, buffer_size, client_conn_id)
-        except Exception as err:
-            client_ad.log.error("Failed to write data: {}".format(err))
-            return 0
-
-        for _, server_ad in enumerate(list_server_ad):
-            try:
-                server_ad.droid.bluetoothConnectionThroughputRead(
-                    number_buffers, buffer_size)
-                total_num_bytes += number_buffers * buffer_size
-            except Exception as err:
-                server_ad.log.error("Failed to read data: {}".format(err))
-                return 0
-
-    for _, server_ad in enumerate(list_server_ad):
-        try:
-            server_ad.droid.bluetoothConnectionThroughputRead(
-                number_buffers, buffer_size)
-            total_num_bytes += number_buffers * buffer_size
-        except Exception as err:
-            server_ad.log.error("Failed to read data: {}".format(err))
-            return 0
-
-    end_read_time = time.perf_counter()
-
-    test_time = (end_read_time - start_write_time)
-    if (test_time == 0):
-        client_ad.log.error("Buffer transmits cannot take zero time")
-        return 0
-    data_rate = (1.000 * total_num_bytes) / test_time
-    log.info(
-        "Calculated using total write and read times: total_num_bytes={}, "
-        "test_time={}, data rate={:08.0f} bytes/sec, {:08.0f} bits/sec".format(
-            total_num_bytes, test_time, data_rate, (data_rate * 8)))
-    return data_rate
-
-
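As a side note, the pipelined write/read loop above primes each connection's TX queue once and then alternates writes and reads so the link always has buffered data; the throughput figure is simply total bytes divided by elapsed wall-clock time. A self-contained sketch of that bookkeeping, with a hypothetical transfer() standing in for one write/read round trip:

import time


def transfer(number_buffers: int, buffer_size: int) -> int:
    # Hypothetical stand-in for one buffered write/read round trip.
    return number_buffers * buffer_size


num_iterations, number_buffers, buffer_size = 100, 100, 240
total_num_bytes = 0
start = time.perf_counter()
for _ in range(num_iterations):
    total_num_bytes += transfer(number_buffers, buffer_size)
elapsed = time.perf_counter() - start
if elapsed > 0:
    data_rate = total_num_bytes / elapsed  # bytes per second
    print(f"{data_rate:.0f} bytes/sec, {data_rate * 8:.0f} bits/sec")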
-def orchestrate_coc_connection(
-        client_ad,
-        server_ad,
-        is_ble,
-        secured_conn=False,
-        le_connection_interval=0,
-        le_tx_data_length=default_le_data_length,
-        accept_timeout_ms=default_bluetooth_socket_timeout_ms,
-        le_min_ce_len=0,
-        le_max_ce_len=0,
-        gatt_disconnection=True):
-    """Sets up the CoC connection between two Android devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-        is_ble: using LE transport.
-        secured_conn: using secured connection
-        le_connection_interval: LE Connection interval. 0 means use default.
-        le_tx_data_length: LE Data Length used by BT Controller to transmit.
-        accept_timeout_ms: timeout while waiting for incoming connection.
-        gatt_disconnection: whether to disconnect the LE GATT connection after setup.
-        Defaults to True; if False, bluetooth_gatt and gatt_callback are also returned.
-    Returns:
-        True if connection was successful or false if unsuccessful,
-        client connection ID,
-        and server connection ID
-    """
-    server_ad.droid.bluetoothStartPairingHelper()
-    client_ad.droid.bluetoothStartPairingHelper()
-
-    adv_callback = None
-    mac_address = None
-    if is_ble:
-        try:
-            # This will start advertising and scanning. Will fail if it could
-            # not find the advertisements from server_ad
-            client_ad.log.info(
-                "Orchestrate_coc_connection: Start BLE advertisement and"
-                "scanning. Secured Connection={}".format(secured_conn))
-            mac_address, adv_callback, scan_callback = (
-                get_mac_address_of_generic_advertisement(client_ad, server_ad))
-        except BtTestUtilsError as err:
-            raise BtCoCTestUtilsError(
-                "Orchestrate_coc_connection: Error in getting mac address: {}".
-                format(err))
-    else:
-        mac_address = server_ad.droid.bluetoothGetLocalAddress()
-        adv_callback = None
-
-    # Adjust the Connection Interval (if necessary)
-    bluetooth_gatt_1 = -1
-    gatt_callback_1 = -1
-    gatt_connected = False
-    if is_ble and (le_connection_interval != 0 or le_min_ce_len != 0 or le_max_ce_len != 0):
-        client_ad.log.info(
-            "Adjusting connection interval={}, le_min_ce_len={}, le_max_ce_len={}"
-            .format(le_connection_interval, le_min_ce_len, le_max_ce_len))
-        try:
-            bluetooth_gatt_1, gatt_callback_1 = setup_gatt_connection(
-                client_ad,
-                mac_address,
-                False,
-                transport=gatt_transport['le'],
-                opportunistic=False)
-            client_ad.droid.bleStopBleScan(scan_callback)
-        except GattTestUtilsError as err:
-            client_ad.log.error(err)
-            if (adv_callback != None):
-                server_ad.droid.bleStopBleAdvertising(adv_callback)
-            return False, None, None
-        client_ad.log.info("setup_gatt_connection returns success")
-        if (le_connection_interval != 0):
-            minInterval = le_connection_interval / le_connection_interval_time_step_ms
-            maxInterval = le_connection_interval / le_connection_interval_time_step_ms
-        else:
-            minInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-            maxInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-        if (le_min_ce_len != 0):
-            le_min_ce_len = le_min_ce_len / le_connection_event_time_step_ms
-        if (le_max_ce_len != 0):
-            le_max_ce_len = le_max_ce_len / le_connection_event_time_step_ms
-
-        return_status = client_ad.droid.gattClientRequestLeConnectionParameters(
-            bluetooth_gatt_1, minInterval, maxInterval, 0,
-            le_default_supervision_timeout, le_min_ce_len, le_max_ce_len)
-        if not return_status:
-            client_ad.log.error(
-                "gattClientRequestLeConnectionParameters returns failure")
-            if (adv_callback != None):
-                server_ad.droid.bleStopBleAdvertising(adv_callback)
-            return False, None, None
-        client_ad.log.info(
-            "gattClientRequestLeConnectionParameters returns success. Interval={}"
-            .format(minInterval))
-        gatt_connected = True
-        # For now, we will only test with 1 Mbit Phy.
-        # TODO: Add explicit tests with 2 MBit Phy.
-        client_ad.droid.gattClientSetPreferredPhy(
-            bluetooth_gatt_1, gatt_phy['1m'], gatt_phy['1m'], 0)
-
-    server_ad.droid.bluetoothSocketConnBeginAcceptThreadPsm(
-        accept_timeout_ms, is_ble, secured_conn)
-
-    psm_value = server_ad.droid.bluetoothSocketConnGetPsm()
-    client_ad.log.info("Assigned PSM value={}".format(psm_value))
-
-    client_ad.droid.bluetoothSocketConnBeginConnectThreadPsm(
-        mac_address, is_ble, psm_value, secured_conn)
-
-    if (le_tx_data_length != default_le_data_length) and is_ble:
-        client_ad.log.info("orchestrate_coc_connection: call "
-                           "bluetoothSocketRequestMaximumTxDataLength")
-        client_ad.droid.bluetoothSocketRequestMaximumTxDataLength()
-
-    end_time = time.time() + bt_default_timeout
-    test_result = False
-    while time.time() < end_time:
-        if len(server_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-            server_ad.log.info("CoC Server Connection Active")
-            if len(client_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-                client_ad.log.info("CoC Client Connection Active")
-                test_result = True
-                break
-        time.sleep(1)
-
-    if (adv_callback != None):
-        server_ad.droid.bleStopBleAdvertising(adv_callback)
-
-    if not test_result:
-        client_ad.log.error("Failed to establish an CoC connection")
-        return False, None, None
-
-    if len(client_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-        server_ad.log.info(
-            "CoC client_ad Connection Active, num=%d",
-            len(client_ad.droid.bluetoothSocketConnActiveConnections()))
-    else:
-        server_ad.log.info("Error CoC client_ad Connection Inactive")
-        client_ad.log.info("Error CoC client_ad Connection Inactive")
-
-    # Wait for the client to be ready
-    client_conn_id = None
-    while (client_conn_id == None):
-        client_conn_id = client_ad.droid.bluetoothGetLastConnId()
-        if (client_conn_id != None):
-            break
-        time.sleep(1)
-
-    # Wait for the server to be ready
-    server_conn_id = None
-    while (server_conn_id == None):
-        server_conn_id = server_ad.droid.bluetoothGetLastConnId()
-        if (server_conn_id != None):
-            break
-        time.sleep(1)
-
-    client_ad.log.info(
-        "orchestrate_coc_connection: client conn id={}, server conn id={}".
-        format(client_conn_id, server_conn_id))
-
-    if gatt_disconnection:
-
-        if gatt_connected:
-            disconnect_gatt_connection(client_ad, bluetooth_gatt_1,
-                                       gatt_callback_1)
-            client_ad.droid.gattClientClose(bluetooth_gatt_1)
-
-        return True, client_conn_id, server_conn_id
-
-    else:
-        return True, client_conn_id, server_conn_id, bluetooth_gatt_1, gatt_callback_1
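For clarity, the connection-parameter conversion in orchestrate_coc_connection maps milliseconds to BLE controller units: connection intervals use 1.25 ms steps and connection event lengths use 0.625 ms steps (the le_connection_interval_time_step_ms and le_connection_event_time_step_ms constants). A standalone sketch of that arithmetic:

LE_CONNECTION_INTERVAL_TIME_STEP_MS = 1.25  # interval unit from the BT spec
LE_CONNECTION_EVENT_TIME_STEP_MS = 0.625    # CE length unit from the BT spec


def interval_ms_to_units(interval_ms: float) -> int:
    # Convert a connection interval in milliseconds to 1.25 ms controller units.
    return int(interval_ms / LE_CONNECTION_INTERVAL_TIME_STEP_MS)


def ce_len_ms_to_units(ce_len_ms: float) -> int:
    # Convert a connection event length in milliseconds to 0.625 ms units.
    return int(ce_len_ms / LE_CONNECTION_EVENT_TIME_STEP_MS)


# A 30 ms interval is 24 units; a 5 ms connection event length is 8 units.
assert interval_ms_to_units(30) == 24
assert ce_len_ms_to_units(5) == 8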
diff --git a/src/antlion/test_utils/bt/bt_constants.py b/src/antlion/test_utils/bt/bt_constants.py
deleted file mode 100644
index 5d7f883..0000000
--- a/src/antlion/test_utils/bt/bt_constants.py
+++ /dev/null
@@ -1,798 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-### Generic Constants Begin ###
-
-bt_default_timeout = 15
-default_rfcomm_timeout_ms = 10000
-default_bluetooth_socket_timeout_ms = 10000
-pan_connect_timeout = 5
-bt_discovery_timeout = 3
-small_timeout = 0.0001
-
-# Time delay (in seconds) at the end of each LE CoC Test to give sufficient time
-# for the ACL LE link to be disconnected. The ACL link stays connected after
-# L2CAP disconnects.  An example of the timeout is L2CAP_LINK_INACTIVITY_TOUT.
-# This delay must be greater than the maximum of these timeouts.
-# TODO: Investigate the use of broadcast intent
-# BluetoothDevice.ACTION_ACL_DISCONNECTED to replace this delay method.
-l2cap_max_inactivity_delay_after_disconnect = 5
-
-# LE specifications related constants
-le_connection_interval_time_step_ms = 1.25
-le_default_supervision_timeout = 2000
-default_le_data_length = 23
-default_le_connection_interval_ms = 30
-le_connection_event_time_step_ms = 0.625
-
-# Headers of LE L2CAP Connection-oriented Channels. See section 3.4, Vol
-# 3, Part A, Version 5.0.
-l2cap_header_size = 4
-l2cap_coc_sdu_length_field_size = 2
-l2cap_coc_header_size = l2cap_header_size + l2cap_coc_sdu_length_field_size
-
-java_integer = {"min": -2147483648, "max": 2147483647}
-
-btsnoop_log_path_on_device = "/data/misc/bluetooth/logs/btsnoop_hci.log"
-btsnoop_last_log_path_on_device = \
-    "/data/misc/bluetooth/logs/btsnoop_hci.log.last"
-pairing_variant_passkey_confirmation = 2
-
-# Callback strings
-scan_result = "BleScan{}onScanResults"
-scan_failed = "BleScan{}onScanFailed"
-batch_scan_result = "BleScan{}onBatchScanResult"
-adv_fail = "BleAdvertise{}onFailure"
-adv_succ = "BleAdvertise{}onSuccess"
-bluetooth_off = "BluetoothStateChangedOff"
-bluetooth_on = "BluetoothStateChangedOn"
-mtu_changed = "GattConnect{}onMtuChanged"
-advertising_set_started = "AdvertisingSet{}onAdvertisingSetStarted"
-advertising_set_stopped = "AdvertisingSet{}onAdvertisingSetStopped"
-advertising_set_on_own_address_read = "AdvertisingSet{}onOwnAddressRead"
-advertising_set_enabled = "AdvertisingSet{}onAdvertisingEnabled"
-advertising_set_data_set = "AdvertisingSet{}onAdvertisingDataSet"
-advertising_set_scan_response_set = "AdvertisingSet{}onScanResponseDataSet"
-advertising_set_parameters_update = \
-    "AdvertisingSet{}onAdvertisingParametersUpdated"
-advertising_set_periodic_parameters_updated = \
-    "AdvertisingSet{}onPeriodicAdvertisingParametersUpdated"
-advertising_set_periodic_data_set = \
-    "AdvertisingSet{}onPeriodicAdvertisingDataSet"
-advertising_set_periodic_enable = "AdvertisingSet{}onPeriodicAdvertisingEnable"
-bluetooth_profile_connection_state_changed = \
-    "BluetoothProfileConnectionStateChanged"
-bluetooth_le_on = "BleStateChangedOn"
-bluetooth_le_off = "BleStateChangedOff"
-bluetooth_a2dp_codec_config_changed = "BluetoothA2dpCodecConfigChanged"
-# End Callback Strings
-
-batch_scan_not_supported_list = [
-    "Nexus 4",
-    "Nexus 5",
-    "Nexus 7",
-]
-
-### Generic Constants End ###
-
-### Bluetooth Constants Begin ###
-
-# rfcomm test uuids
-rfcomm_secure_uuid = "fa87c0d0-afac-11de-8a39-0800200c9a66"
-rfcomm_insecure_uuid = "8ce255c0-200a-11e0-ac64-0800200c9a66"
-
-# bluetooth socket connection test uuid
-bluetooth_socket_conn_test_uuid = "12345678-1234-5678-9abc-123456789abc"
-
-# Bluetooth Adapter Scan Mode Types
-bt_scan_mode_types = {
-    "state_off": -1,
-    "none": 0,
-    "connectable": 1,
-    "connectable_discoverable": 3
-}
-
-# Bluetooth Adapter State Constants
-bt_adapter_states = {
-    "off": 10,
-    "turning_on": 11,
-    "on": 12,
-    "turning_off": 13,
-    "ble_turning_on": 14,
-    "ble_on": 15,
-    "ble_turning_off": 16
-}
-
-# Should be kept in sync with BluetoothProfile.java
-bt_profile_constants = {
-    "headset": 1,
-    "a2dp": 2,
-    "health": 3,
-    "input_device": 4,
-    "pan": 5,
-    "pbap_server": 6,
-    "gatt": 7,
-    "gatt_server": 8,
-    "map": 9,
-    "sap": 10,
-    "a2dp_sink": 11,
-    "avrcp_controller": 12,
-    "headset_client": 16,
-    "pbap_client": 17,
-    "map_mce": 18
-}
-
-# Bluetooth RFCOMM UUIDs as defined by the SIG
-bt_rfcomm_uuids = {
-    "default_uuid": "457807c0-4897-11df-9879-0800200c9a66",
-    "base_uuid": "00000000-0000-1000-8000-00805F9B34FB",
-    "sdp": "00000001-0000-1000-8000-00805F9B34FB",
-    "udp": "00000002-0000-1000-8000-00805F9B34FB",
-    "rfcomm": "00000003-0000-1000-8000-00805F9B34FB",
-    "tcp": "00000004-0000-1000-8000-00805F9B34FB",
-    "tcs_bin": "00000005-0000-1000-8000-00805F9B34FB",
-    "tcs_at": "00000006-0000-1000-8000-00805F9B34FB",
-    "att": "00000007-0000-1000-8000-00805F9B34FB",
-    "obex": "00000008-0000-1000-8000-00805F9B34FB",
-    "ip": "00000009-0000-1000-8000-00805F9B34FB",
-    "ftp": "0000000A-0000-1000-8000-00805F9B34FB",
-    "http": "0000000C-0000-1000-8000-00805F9B34FB",
-    "wsp": "0000000E-0000-1000-8000-00805F9B34FB",
-    "bnep": "0000000F-0000-1000-8000-00805F9B34FB",
-    "upnp": "00000010-0000-1000-8000-00805F9B34FB",
-    "hidp": "00000011-0000-1000-8000-00805F9B34FB",
-    "hardcopy_control_channel": "00000012-0000-1000-8000-00805F9B34FB",
-    "hardcopy_data_channel": "00000014-0000-1000-8000-00805F9B34FB",
-    "hardcopy_notification": "00000016-0000-1000-8000-00805F9B34FB",
-    "avctp": "00000017-0000-1000-8000-00805F9B34FB",
-    "avdtp": "00000019-0000-1000-8000-00805F9B34FB",
-    "cmtp": "0000001B-0000-1000-8000-00805F9B34FB",
-    "mcap_control_channel": "0000001E-0000-1000-8000-00805F9B34FB",
-    "mcap_data_channel": "0000001F-0000-1000-8000-00805F9B34FB",
-    "l2cap": "00000100-0000-1000-8000-00805F9B34FB"
-}
-
-# Should be kept in sync with BluetoothProfile#STATE_* constants.
-bt_profile_states = {
-    "disconnected": 0,
-    "connecting": 1,
-    "connected": 2,
-    "disconnecting": 3
-}
-
-# Access Levels from BluetoothDevice.
-bt_access_levels = {"access_allowed": 1, "access_denied": 2}
-
-# Priority levels as defined in BluetoothProfile.java.
-bt_priority_levels = {
-    "auto_connect": 1000,
-    "on": 100,
-    "off": 0,
-    "undefined": -1
-}
-
-# A2DP codec configuration constants as defined in
-# frameworks/base/core/java/android/bluetooth/BluetoothCodecConfig.java
-codec_types = {
-    'SBC': 0,
-    'AAC': 1,
-    'APTX': 2,
-    'APTX-HD': 3,
-    'LDAC': 4,
-    'MAX': 5,
-    'INVALID': 1000000
-}
-
-codec_priorities = {'DISABLED': -1, 'DEFAULT': 0, 'HIGHEST': 1000000}
-
-sample_rates = {
-    'NONE': 0,
-    '44100': 0x1 << 0,
-    '48000': 0x1 << 1,
-    '88200': 0x1 << 2,
-    '96000': 0x1 << 3,
-    '176400': 0x1 << 4,
-    '192000': 0x1 << 5
-}
-
-bits_per_samples = {'NONE': 0, '16': 0x1 << 0, '24': 0x1 << 1, '32': 0x1 << 2}
-
-channel_modes = {'NONE': 0, 'MONO': 0x1 << 0, 'STEREO': 0x1 << 1}
-
-# Bluetooth HID constants.
-hid_connection_timeout = 5
-
-# Bluetooth HID EventFacade constants.
-hid_on_set_report_event = "onSetReport"
-hid_on_get_report_event = "onGetReport"
-hid_on_set_protocol_event = "onSetProtocol"
-hid_on_intr_data_event = "onInterruptData"
-hid_on_virtual_cable_unplug_event = "onVirtualCableUnplug"
-hid_id_keyboard = 1
-hid_id_mouse = 2
-hid_default_event_timeout = 15
-hid_default_set_report_payload = "Haha"
-
-### Bluetooth Constants End ###
-
-### Bluetooth Low Energy Constants Begin ###
-
-# Bluetooth Low Energy scan callback types
-ble_scan_settings_callback_types = {
-    "all_matches": 1,
-    "first_match": 2,
-    "match_lost": 4,
-    "found_and_lost": 6
-}
-
-# Bluetooth Low Energy scan settings match mode
-ble_scan_settings_match_modes = {"aggresive": 1, "sticky": 2}
-
-# Bluetooth Low Energy scan settings match nums
-ble_scan_settings_match_nums = {"one": 1, "few": 2, "max": 3}
-
-# Bluetooth Low Energy scan settings result types
-ble_scan_settings_result_types = {"full": 0, "abbreviated": 1}
-
-# Bluetooth Low Energy scan settings mode
-ble_scan_settings_modes = {
-    "opportunistic": -1,
-    "low_power": 0,
-    "balanced": 1,
-    "low_latency": 2
-}
-
-# Bluetooth Low Energy scan settings report delay millis
-ble_scan_settings_report_delay_milli_seconds = {
-    "min": 0,
-    "max": 9223372036854775807
-}
-
-# Bluetooth Low Energy scan settings phy
-ble_scan_settings_phys = {"1m": 1, "coded": 3, "all_supported": 255}
-
-# Bluetooth Low Energy advertise settings types
-ble_advertise_settings_types = {"non_connectable": 0, "connectable": 1}
-
-# Bluetooth Low Energy advertise settings modes
-ble_advertise_settings_modes = {
-    "low_power": 0,
-    "balanced": 1,
-    "low_latency": 2
-}
-
-# Bluetooth Low Energy advertise settings tx power
-ble_advertise_settings_tx_powers = {
-    "ultra_low": 0,
-    "low": 1,
-    "medium": 2,
-    "high": 3
-}
-
-# Bluetooth Low Energy advertise settings own address type
-ble_advertise_settings_own_address_types = {
-    "public": 0,
-    "random": 1
-}
-
-# Bluetooth Low Energy service uuids for specific devices
-ble_uuids = {
-    "p_service": "0000feef-0000-1000-8000-00805f9b34fb",
-    "hr_service": "0000180d-0000-1000-8000-00805f9b34fb"
-}
-
-# Bluetooth Low Energy advertising error codes
-ble_advertise_error_code = {
-    "data_too_large": 1,
-    "too_many_advertisers": 2,
-    "advertisement_already_started": 3,
-    "bluetooth_internal_failure": 4,
-    "feature_not_supported": 5
-}
-
-### Bluetooth Low Energy Constants End ###
-
-### Bluetooth GATT Constants Begin ###
-
-# Gatt Callback error messages
-gatt_cb_err = {
-    "char_write_req_err":
-    "Characteristic Write Request event not found. Expected {}",
-    "char_write_err": "Characteristic Write event not found. Expected {}",
-    "desc_write_req_err":
-    "Descriptor Write Request event not found. Expected {}",
-    "desc_write_err": "Descriptor Write event not found. Expected {}",
-    "char_read_err": "Characteristic Read event not found. Expected {}",
-    "char_read_req_err": "Characteristic Read Request not found. Expected {}",
-    "desc_read_err": "Descriptor Read event not found. Expected {}",
-    "desc_read_req_err":
-    "Descriptor Read Request event not found. Expected {}",
-    "rd_remote_rssi_err": "Read Remote RSSI event not found. Expected {}",
-    "gatt_serv_disc_err":
-    "GATT Services Discovered event not found. Expected {}",
-    "serv_added_err": "Service Added event not found. Expected {}",
-    "mtu_changed_err": "MTU Changed event not found. Expected {}",
-    "mtu_serv_changed_err": "MTU Server Changed event not found. Expected {}",
-    "gatt_conn_changed_err":
-    "GATT Connection Changed event not found. Expected {}",
-    "char_change_err":
-    "GATT Characteristic Changed event not fond. Expected {}",
-    "phy_read_err": "Phy Read event not fond. Expected {}",
-    "phy_update_err": "Phy Update event not fond. Expected {}",
-    "exec_write_err": "GATT Execute Write event not found. Expected {}"
-}
-
-# GATT callback strings as defined in GattClientFacade.java and
-# GattServerFacade.java implemented callbacks.
-gatt_cb_strings = {
-    "char_write_req": "GattServer{}onCharacteristicWriteRequest",
-    "exec_write": "GattServer{}onExecuteWrite",
-    "char_write": "GattConnect{}onCharacteristicWrite",
-    "desc_write_req": "GattServer{}onDescriptorWriteRequest",
-    "desc_write": "GattConnect{}onDescriptorWrite",
-    "char_read": "GattConnect{}onCharacteristicRead",
-    "char_read_req": "GattServer{}onCharacteristicReadRequest",
-    "desc_read": "GattConnect{}onDescriptorRead",
-    "desc_read_req": "GattServer{}onDescriptorReadRequest",
-    "rd_remote_rssi": "GattConnect{}onReadRemoteRssi",
-    "rd_remote_ble_rssi": "BleScan{}onScanResults",
-    "gatt_serv_disc": "GattConnect{}onServicesDiscovered",
-    "serv_added": "GattServer{}onServiceAdded",
-    "mtu_changed": "GattConnect{}onMtuChanged",
-    "mtu_serv_changed": "GattServer{}onMtuChanged",
-    "gatt_conn_change": "GattConnect{}onConnectionStateChange",
-    "char_change": "GattConnect{}onCharacteristicChanged",
-    "phy_read": "GattConnect{}onPhyRead",
-    "phy_update": "GattConnect{}onPhyUpdate",
-    "serv_phy_read": "GattServer{}onPhyRead",
-    "serv_phy_update": "GattServer{}onPhyUpdate",
-}
-
-# GATT event dictionary of expected callbacks and errors.
-gatt_event = {
-    "char_write_req": {
-        "evt": gatt_cb_strings["char_write_req"],
-        "err": gatt_cb_err["char_write_req_err"]
-    },
-    "exec_write": {
-        "evt": gatt_cb_strings["exec_write"],
-        "err": gatt_cb_err["exec_write_err"]
-    },
-    "char_write": {
-        "evt": gatt_cb_strings["char_write"],
-        "err": gatt_cb_err["char_write_err"]
-    },
-    "desc_write_req": {
-        "evt": gatt_cb_strings["desc_write_req"],
-        "err": gatt_cb_err["desc_write_req_err"]
-    },
-    "desc_write": {
-        "evt": gatt_cb_strings["desc_write"],
-        "err": gatt_cb_err["desc_write_err"]
-    },
-    "char_read": {
-        "evt": gatt_cb_strings["char_read"],
-        "err": gatt_cb_err["char_read_err"]
-    },
-    "char_read_req": {
-        "evt": gatt_cb_strings["char_read_req"],
-        "err": gatt_cb_err["char_read_req_err"]
-    },
-    "desc_read": {
-        "evt": gatt_cb_strings["desc_read"],
-        "err": gatt_cb_err["desc_read_err"]
-    },
-    "desc_read_req": {
-        "evt": gatt_cb_strings["desc_read_req"],
-        "err": gatt_cb_err["desc_read_req_err"]
-    },
-    "rd_remote_rssi": {
-        "evt": gatt_cb_strings["rd_remote_rssi"],
-        "err": gatt_cb_err["rd_remote_rssi_err"]
-    },
-    "gatt_serv_disc": {
-        "evt": gatt_cb_strings["gatt_serv_disc"],
-        "err": gatt_cb_err["gatt_serv_disc_err"]
-    },
-    "serv_added": {
-        "evt": gatt_cb_strings["serv_added"],
-        "err": gatt_cb_err["serv_added_err"]
-    },
-    "mtu_changed": {
-        "evt": gatt_cb_strings["mtu_changed"],
-        "err": gatt_cb_err["mtu_changed_err"]
-    },
-    "gatt_conn_change": {
-        "evt": gatt_cb_strings["gatt_conn_change"],
-        "err": gatt_cb_err["gatt_conn_changed_err"]
-    },
-    "char_change": {
-        "evt": gatt_cb_strings["char_change"],
-        "err": gatt_cb_err["char_change_err"]
-    },
-    "phy_read": {
-        "evt": gatt_cb_strings["phy_read"],
-        "err": gatt_cb_err["phy_read_err"]
-    },
-    "phy_update": {
-        "evt": gatt_cb_strings["phy_update"],
-        "err": gatt_cb_err["phy_update_err"]
-    },
-    "serv_phy_read": {
-        "evt": gatt_cb_strings["serv_phy_read"],
-        "err": gatt_cb_err["phy_read_err"]
-    },
-    "serv_phy_update": {
-        "evt": gatt_cb_strings["serv_phy_update"],
-        "err": gatt_cb_err["phy_update_err"]
-    }
-}
-
-# Matches constants of connection states defined in BluetoothGatt.java
-gatt_connection_state = {
-    "disconnected": 0,
-    "connecting": 1,
-    "connected": 2,
-    "disconnecting": 3,
-    "closed": 4
-}
-
-# Matches constants of Bluetooth GATT Characteristic values as defined
-# in BluetoothGattCharacteristic.java
-gatt_characteristic = {
-    "property_broadcast": 0x01,
-    "property_read": 0x02,
-    "property_write_no_response": 0x04,
-    "property_write": 0x08,
-    "property_notify": 0x10,
-    "property_indicate": 0x20,
-    "property_signed_write": 0x40,
-    "property_extended_props": 0x80,
-    "permission_read": 0x01,
-    "permission_read_encrypted": 0x02,
-    "permission_read_encrypted_mitm": 0x04,
-    "permission_write": 0x10,
-    "permission_write_encrypted": 0x20,
-    "permission_write_encrypted_mitm": 0x40,
-    "permission_write_signed": 0x80,
-    "permission_write_signed_mitm": 0x100,
-    "write_type_default": 0x02,
-    "write_type_no_response": 0x01,
-    "write_type_signed": 0x04,
-}
-
-# Matches constants of Bluetooth GATT Characteristic values as defined
-# in BluetoothGattDescriptor.java
-gatt_descriptor = {
-    "enable_notification_value": [0x01, 0x00],
-    "enable_indication_value": [0x02, 0x00],
-    "disable_notification_value": [0x00, 0x00],
-    "permission_read": 0x01,
-    "permission_read_encrypted": 0x02,
-    "permission_read_encrypted_mitm": 0x04,
-    "permission_write": 0x10,
-    "permission_write_encrypted": 0x20,
-    "permission_write_encrypted_mitm": 0x40,
-    "permission_write_signed": 0x80,
-    "permission_write_signed_mitm": 0x100
-}
-
-# https://www.bluetooth.com/specifications/gatt/descriptors
-gatt_char_desc_uuids = {
-    "char_ext_props": '00002900-0000-1000-8000-00805f9b34fb',
-    "char_user_desc": '00002901-0000-1000-8000-00805f9b34fb',
-    "client_char_cfg": '00002902-0000-1000-8000-00805f9b34fb',
-    "server_char_cfg": '00002903-0000-1000-8000-00805f9b34fb',
-    "char_fmt_uuid": '00002904-0000-1000-8000-00805f9b34fb',
-    "char_agreg_fmt": '00002905-0000-1000-8000-00805f9b34fb',
-    "char_valid_range": '00002906-0000-1000-8000-00805f9b34fb',
-    "external_report_reference": '00002907-0000-1000-8000-00805f9b34fb',
-    "report_reference": '00002908-0000-1000-8000-00805f9b34fb'
-}
-
-# https://www.bluetooth.com/specifications/gatt/characteristics
-gatt_char_types = {
-    "device_name": '00002a00-0000-1000-8000-00805f9b34fb',
-    "appearance": '00002a01-0000-1000-8000-00805f9b34fb',
-    "peripheral_priv_flag": '00002a02-0000-1000-8000-00805f9b34fb',
-    "reconnection_address": '00002a03-0000-1000-8000-00805f9b34fb',
-    "peripheral_pref_conn": '00002a04-0000-1000-8000-00805f9b34fb',
-    "service_changed": '00002a05-0000-1000-8000-00805f9b34fb',
-    "system_id": '00002a23-0000-1000-8000-00805f9b34fb',
-    "model_number_string": '00002a24-0000-1000-8000-00805f9b34fb',
-    "serial_number_string": '00002a25-0000-1000-8000-00805f9b34fb',
-    "firmware_revision_string": '00002a26-0000-1000-8000-00805f9b34fb',
-    "hardware_revision_string": '00002a27-0000-1000-8000-00805f9b34fb',
-    "software_revision_string": '00002a28-0000-1000-8000-00805f9b34fb',
-    "manufacturer_name_string": '00002a29-0000-1000-8000-00805f9b34fb',
-    "pnp_id": '00002a50-0000-1000-8000-00805f9b34fb',
-}
-
-# Matches constants of Bluetooth GATT Characteristic values as defined
-# in BluetoothGattCharacteristic.java
-gatt_characteristic_value_format = {
-    "string": 0x1,
-    "byte": 0x2,
-    "sint8": 0x21,
-    "uint8": 0x11,
-    "sint16": 0x22,
-    "unit16": 0x12,
-    "sint32": 0x24,
-    "uint32": 0x14
-}
-
-# Matches constants of Bluetooth Gatt Service types as defined in
-# BluetoothGattService.java
-gatt_service_types = {"primary": 0, "secondary": 1}
-
-# Matches constants of Bluetooth Gatt Connection Priority values as defined in
-# BluetoothGatt.java
-gatt_connection_priority = {"balanced": 0, "high": 1, "low_power": 2}
-
-# Min and max MTU values
-gatt_mtu_size = {"min": 23, "max": 217}
-
-# Gatt Characteristic attribute lengths
-gatt_characteristic_attr_length = {"attr_1": 1, "attr_2": 3, "attr_3": 15}
-
-# Matches constants of Bluetooth Gatt operations status as defined in
-# BluetoothGatt.java
-gatt_status = {"success": 0, "failure": 0x101}
-
-# Matches constants of Bluetooth transport values as defined in
-# BluetoothDevice.java
-gatt_transport = {"auto": 0x00, "bredr": 0x01, "le": 0x02}
-
-# Matches constants of Bluetooth physical channeling values as defined in
-# BluetoothDevice.java
-gatt_phy = {"1m": 1, "2m": 2, "le_coded": 3}
-
-# Matches constants of Bluetooth physical channeling bitmask values as defined
-# in BluetoothDevice.java
-gatt_phy_mask = {"1m_mask": 1, "2m_mask": 2, "coded_mask": 4}
-
-# Values as defined in the Bluetooth GATT specification
-gatt_server_responses = {
-    "GATT_SUCCESS": 0x0,
-    "GATT_FAILURE": 0x1,
-    "GATT_READ_NOT_PERMITTED": 0x2,
-    "GATT_WRITE_NOT_PERMITTED": 0x3,
-    "GATT_INVALID_PDU": 0x4,
-    "GATT_INSUFFICIENT_AUTHENTICATION": 0x5,
-    "GATT_REQUEST_NOT_SUPPORTED": 0x6,
-    "GATT_INVALID_OFFSET": 0x7,
-    "GATT_INSUFFICIENT_AUTHORIZATION": 0x8,
-    "GATT_INVALID_ATTRIBUTE_LENGTH": 0xd,
-    "GATT_INSUFFICIENT_ENCRYPTION": 0xf,
-    "GATT_CONNECTION_CONGESTED": 0x8f,
-    "GATT_13_ERR": 0x13,
-    "GATT_12_ERR": 0x12,
-    "GATT_0C_ERR": 0x0C,
-    "GATT_16": 0x16
-}
-
-### Bluetooth GATT Constants End ###
-
-### Chameleon Constants Begin ###
-
-# Chameleon audio bits per sample.
-audio_bits_per_sample_16 = 16
-audio_bits_per_sample_24 = 24
-audio_bits_per_sample_32 = 32
-
-# Chameleon audio sample rates.
-audio_sample_rate_44100 = 44100
-audio_sample_rate_48000 = 48000
-audio_sample_rate_88200 = 88200
-audio_sample_rate_96000 = 96000
-
-# Chameleon audio channel modes.
-audio_channel_mode_mono = 1
-audio_channel_mode_stereo = 2
-audio_channel_mode_8 = 8
-
-# Chameleon time delays.
-delay_after_binding_seconds = 0.5
-delay_before_record_seconds = 0.5
-silence_wait_seconds = 5
-
-# Chameleon bus endpoints.
-fpga_linein_bus_endpoint = 'Chameleon FPGA line-in'
-headphone_bus_endpoint = 'Cros device headphone'
-
-### Chameleon Constants End ###
-
-# Begin logcat strings dict
-logcat_strings = {
-    "media_playback_vol_changed": "onRouteVolumeChanged",
-}
-
-# End logcat strings dict
-
-### Begin Service Discovery UUIDS ###
-# Values match the Bluetooth SIG defined values:
-# https://www.bluetooth.com/specifications/assigned-numbers/service-discovery
-sig_uuid_constants = {
-    "BASE_UUID": "0000{}-0000-1000-8000-00805F9B34FB",
-    "SDP": "0001",
-    "UDP": "0002",
-    "RFCOMM": "0003",
-    "TCP": "0004",
-    "TCS-BIN": "0005",
-    "TCS-AT": "0006",
-    "ATT": "0007",
-    "OBEX": "0008",
-    "IP": "0009",
-    "FTP": "000A",
-    "HTTP": "000C",
-    "WSP": "000E",
-    "BNEP": "000F",
-    "UPNP": "0010",
-    "HIDP": "0011",
-    "HardcopyControlChannel": "0012",
-    "HardcopyDataChannel": "0014",
-    "HardcopyNotification": "0016",
-    "AVCTP": "0017",
-    "AVDTP": "0019",
-    "CMTP": "001B",
-    "MCAPControlChannel": "001E",
-    "MCAPDataChannel": "001F",
-    "L2CAP": "0100",
-    "ServiceDiscoveryServerServiceClassID": "1000",
-    "BrowseGroupDescriptorServiceClassID": "1001",
-    "SerialPort": "1101",
-    "LANAccessUsingPPP": "1102",
-    "DialupNetworking": "1103",
-    "IrMCSync": "1104",
-    "OBEXObjectPush": "1105",
-    "OBEXFileTransfer": "1106",
-    "IrMCSyncCommand": "1107",
-    "Headset": "1108",
-    "CordlessTelephony": "1109",
-    "AudioSource": "110A",
-    "AudioSink": "110B",
-    "A/V_RemoteControlTarget": "110C",
-    "AdvancedAudioDistribution": "110D",
-    "A/V_RemoteControl": "110E",
-    "A/V_RemoteControlController": "110F",
-    "Intercom": "1110",
-    "Fax": "1111",
-    "Headset - Audio Gateway (AG)": "1112",
-    "WAP": "1113",
-    "WAP_CLIENT": "1114",
-    "PANU": "1115",
-    "NAP": "1116",
-    "GN": "1117",
-    "DirectPrinting": "1118",
-    "ReferencePrinting": "1119",
-    "ImagingResponder": "111B",
-    "ImagingAutomaticArchive": "111C",
-    "ImagingReferencedObjects": "111D",
-    "Handsfree": "111E",
-    "HandsfreeAudioGateway": "111F",
-    "DirectPrintingReferenceObjectsService": "1120",
-    "ReflectedUI": "1121",
-    "BasicPrinting": "1122",
-    "PrintingStatus": "1123",
-    "HumanInterfaceDeviceService": "1124",
-    "HardcopyCableReplacement": "1125",
-    "HCR_Print": "1126",
-    "HCR_Scan": "1127",
-    "Common_ISDN_Access": "1128",
-    "SIM_Access": "112D",
-    "Phonebook Access - PCE": "112E",
-    "Phonebook Access - PSE": "112F",
-    "Phonebook Access": "1130",
-    "Headset - HS": "1131",
-    "Message Access Server": "1132",
-    "Message Notification Server": "1133",
-    "Message Access Profile": "1134",
-    "GNSS": "1135",
-    "GNSS_Server": "1136",
-    "PnPInformation": "1200",
-    "GenericNetworking": "1201",
-    "GenericFileTransfer": "1202",
-    "GenericAudio": "1203",
-    "GenericTelephony": "1204",
-    "UPNP_Service": "1205",
-    "UPNP_IP_Service": "1206",
-    "ESDP_UPNP_IP_PAN": "1300",
-    "ESDP_UPNP_IP_LAP": "1301",
-    "ESDP_UPNP_L2CAP": "1302",
-    "VideoSource": "1303",
-    "VideoSink": "1304",
-    "VideoDistribution": "1305",
-    "HDP": "1400"
-}
-
-### End Service Discovery UUIDS ###
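The BASE_UUID template above shows how 16-bit SIG-assigned values expand to full 128-bit UUIDs: the 4-digit short value fills the lower 16 bits of the first field of the Bluetooth base UUID. A minimal sketch using the same format string:

# Same template as sig_uuid_constants["BASE_UUID"] above.
BASE_UUID = "0000{}-0000-1000-8000-00805F9B34FB"


def sig_uuid(short_uuid: str) -> str:
    # Expand a 16-bit SIG UUID given as a 4-digit hex string, e.g. "110B".
    return BASE_UUID.format(short_uuid)


assert sig_uuid("110B") == "0000110B-0000-1000-8000-00805F9B34FB"  # AudioSink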
-
-### Begin Appearance Constants ###
-# https://www.bluetooth.com/wp-content/uploads/Sitecore-Media-Library/Gatt/Xml/Characteristics/org.bluetooth.characteristic.gap.appearance.xml
-sig_appearance_constants = {
-    "UNKNOWN": 0,
-    "PHONE": 64,
-    "COMPUTER": 128,
-    "WATCH": 192,
-    "WATCH_SPORTS": 193,
-    "CLOCK": 256,
-    "DISPLAY": 320,
-    "REMOTE_CONTROL": 384,
-    "EYE_GLASSES": 448,
-    "TAG": 512,
-    "KEYRING": 576,
-    "MEDIA_PLAYER": 640,
-    "BARCODE_SCANNER": 704,
-    "THERMOMETER": 768,
-    "THERMOMETER_EAR": 769,
-    "HEART_RATE_SENSOR": 832,
-    "HEART_RATE_SENSOR_BELT": 833,
-    "BLOOD_PRESSURE": 896,
-    "BLOOD_PRESSURE_ARM": 897,
-    "BLOOD_PRESSURE_WRIST": 898,
-    "HID": 960,
-    "HID_KEYBOARD": 961,
-    "HID_MOUSE": 962,
-    "HID_JOYSTICK": 963,
-    "HID_GAMEPAD": 964,
-    "HID_DIGITIZER_TABLET": 965,
-    "HID_CARD_READER": 966,
-    "HID_DIGITAL_PEN": 967,
-    "HID_BARCODE_SCANNER": 968,
-    "GLUCOSE_METER": 1024,
-    "RUNNING_WALKING_SENSOR": 1088,
-    "RUNNING_WALKING_SENSOR_IN_SHOE": 1089,
-    "RUNNING_WALKING_SENSOR_ON_SHOE": 1090,
-    "RUNNING_WALKING_SENSOR_ON_HIP": 1091,
-    "CYCLING": 1152,
-    "CYCLING_COMPUTER": 1153,
-    "CYCLING_SPEED_SENSOR": 1154,
-    "CYCLING_CADENCE_SENSOR": 1155,
-    "CYCLING_POWER_SENSOR": 1156,
-    "CYCLING_SPEED_AND_CADENCE_SENSOR": 1157,
-    "PULSE_OXIMETER": 3136,
-    "PULSE_OXIMETER_FINGERTIP": 3137,
-    "PULSE_OXIMETER_WRIST": 3138,
-    "WEIGHT_SCALE": 3200,
-    "PERSONAL_MOBILITY": 3264,
-    "PERSONAL_MOBILITY_WHEELCHAIR": 3265,
-    "PERSONAL_MOBILITY_SCOOTER": 3266,
-    "GLUCOSE_MONITOR": 3328,
-    "SPORTS_ACTIVITY": 5184,
-    "SPORTS_ACTIVITY_LOCATION_DISPLAY": 5185,
-    "SPORTS_ACTIVITY_LOCATION_AND_NAV_DISPLAY": 5186,
-    "SPORTS_ACTIVITY_LOCATION_POD": 5187,
-    "SPORTS_ACTIVITY_LOCATION_AND_NAV_POD": 5188,
-}
-
-### End Appearance Constants ###
-
-# Attribute Record values from the Bluetooth Specification
-# Version 5, Vol 3, Part B
-bt_attribute_values = {
-    'ATTR_SERVICE_RECORD_HANDLE': 0x0000,
-    'ATTR_SERVICE_CLASS_ID_LIST': 0x0001,
-    'ATTR_SERVICE_RECORD_STATE': 0x0002,
-    'ATTR_SERVICE_ID': 0x0003,
-    'ATTR_PROTOCOL_DESCRIPTOR_LIST': 0x0004,
-    'ATTR_ADDITIONAL_PROTOCOL_DESCRIPTOR_LIST': 0x000D,
-    'ATTR_BROWSE_GROUP_LIST': 0x0005,
-    'ATTR_LANGUAGE_BASE_ATTRIBUTE_ID_LIST': 0x0006,
-    'ATTR_SERVICE_INFO_TIME_TO_LIVE': 0x0007,
-    'ATTR_SERVICE_AVAILABILITY': 0x0008,
-    'ATTR_BLUETOOTH_PROFILE_DESCRIPTOR_LIST': 0x0009,
-    'ATTR_A2DP_SUPPORTED_FEATURES': 0x0311,
-}
diff --git a/src/antlion/test_utils/bt/bt_contacts_utils.py b/src/antlion/test_utils/bt/bt_contacts_utils.py
deleted file mode 100644
index 6abafcb..0000000
--- a/src/antlion/test_utils/bt/bt_contacts_utils.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Compare_contacts accepts 2 vcf files, extracts full name, email, and
-telephone numbers from each and reports how many unique cards it finds across
-the two files.
-"""
-
-from mmap import ACCESS_READ
-from mmap import mmap
-import logging
-import re
-import random
-import string
-import time
-from antlion.utils import exe_cmd
-import queue
-
-# CallLog types
-INCOMMING_CALL_TYPE = "1"
-OUTGOING_CALL_TYPE = "2"
-MISSED_CALL_TYPE = "3"
-
-# Callback strings.
-CONTACTS_CHANGED_CALLBACK = "ContactsChanged"
-CALL_LOG_CHANGED = "CallLogChanged"
-CONTACTS_ERASED_CALLBACK = "ContactsErased"
-
-# URI for contacts database on Nexus.
-CONTACTS_URI = "content://com.android.contacts/data/phones"
-
-# Path for temporary file storage on device.
-STORAGE_PATH = "/storage/emulated/0/Download/"
-
-PBAP_SYNC_TIME = 30
-
-log = logging
-
-
-def parse_contacts(file_name):
-    """Read vcf file and generate a list of contacts.
-
-    Each contact's full name, preferred email, and all phone numbers are extracted.
-    """
-
-    vcard_regex = re.compile(b"^BEGIN:VCARD((\n*?.*?)*?)END:VCARD",
-                             re.MULTILINE)
-    fullname_regex = re.compile(b"^FN:(.*)", re.MULTILINE)
-    email_regex = re.compile(b"^EMAIL;PREF:(.*)", re.MULTILINE)
-    tel_regex = re.compile(b"^TEL;(.*):(.*)", re.MULTILINE)
-
-    with open(file_name, "r") as contacts_file:
-        contacts = []
-        contacts_map = mmap(
-            contacts_file.fileno(), length=0, access=ACCESS_READ)
-        new_contact = None
-
-        # Find all VCARDs in the input file, then extract the first full name,
-        # first email address, and all phone numbers from each card and add
-        # the result to the contact list.
-        for current_vcard in vcard_regex.findall(contacts_map):
-            new_contact = VCard()
-
-            fullname = fullname_regex.search(current_vcard[0])
-            if fullname is not None:
-                new_contact.name = fullname.group(1)
-
-            email = email_regex.search(current_vcard[0])
-            if email is not None:
-                new_contact.email = email.group(1)
-
-            for phone_number in tel_regex.findall(current_vcard[0]):
-                new_contact.add_phone_number(
-                    PhoneNumber(phone_number[0], phone_number[1]))
-
-            contacts.append(new_contact)
-
-        return contacts
-
-
-def phone_number_count(destination_path, file_name):
-    """Counts number of phone numbers in a VCF.
-    """
-    tel_regex = re.compile(b"^TEL;(.*):(.*)", re.MULTILINE)
-    with open("{}{}".format(destination_path, file_name),
-              "r") as contacts_file:
-        contacts_map = mmap(
-            contacts_file.fileno(), length=0, access=ACCESS_READ)
-        numbers = tel_regex.findall(contacts_map)
-        return len(numbers)
-
-
-def count_contacts_with_differences(destination_path,
-                                    pce_contacts_vcf_file_name,
-                                    pse_contacts_vcf_file_name):
-    """Compare two contact files and report the number of differences.
-
-    The difference count is returned and the differences are logged; the
-    comparison is order independent.
-    """
-
-    pce_contacts = parse_contacts("{}{}".format(destination_path,
-                                                pce_contacts_vcf_file_name))
-    pse_contacts = parse_contacts("{}{}".format(destination_path,
-                                                pse_contacts_vcf_file_name))
-
-    differences = set(pce_contacts).symmetric_difference(set(pse_contacts))
-    if not differences:
-        log.info("All {} contacts in the phonebooks match".format(
-            str(len(pce_contacts))))
-    else:
-        log.info("{} contacts match, but ".format(
-            str(len(set(pce_contacts).intersection(set(pse_contacts))))))
-        log.info("the following {} entries don't match:".format(
-            str(len(differences))))
-        for current_vcard in differences:
-            log.info(current_vcard)
-    return len(differences)
-
-
-class PhoneNumber(object):
-    """Simple class for maintaining a phone number entry and type with only the
-    digits.
-    """
-
-    def __init__(self, phone_type, phone_number):
-        self.phone_type = phone_type
-        # remove non digits from phone_number
-        self.phone_number = re.sub(r"\D", "", str(phone_number))
-
-    def __eq__(self, other):
-        return (self.phone_type == other.phone_type and
-                self.phone_number == other.phone_number)
-
-    def __hash__(self):
-        return hash(self.phone_type) ^ hash(self.phone_number)
-
-
-class VCard(object):
-    """Contains name, email, and phone numbers.
-    """
-
-    def __init__(self):
-        self.name = None
-        self.first_name = None
-        self.last_name = None
-        self.email = None
-        self.phone_numbers = []
-        self.photo = None
-
-    def __lt__(self, other):
-        return self.name < other.name
-
-    def __hash__(self):
-        result = hash(self.name) ^ hash(self.email) ^ hash(self.photo == None)
-        for number in self.phone_numbers:
-            result ^= hash(number)
-        return result
-
-    def __eq__(self, other):
-        return hash(self) == hash(other)
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __str__(self):
-        vcard_strings = ["BEGIN:VCARD\n", "VERSION:2.1\n"]
-
-        if self.first_name or self.last_name:
-            vcard_strings.append("N:{};{};;;\nFN:{} {}\n".format(
-                self.last_name, self.first_name, self.first_name,
-                self.last_name))
-        elif self.name:
-            vcard_strings.append("FN:{}\n".format(self.name))
-
-        if self.phone_numbers:
-            for phone in self.phone_numbers:
-                vcard_strings.append("TEL;{}:{}\n".format(
-                    str(phone.phone_type), phone.phone_number))
-
-        if self.email:
-            vcard_strings.append("EMAIL;PREF:{}\n".format(self.email))
-
-        vcard_strings.append("END:VCARD\n")
-        return "".join(vcard_strings)
-
-    def add_phone_number(self, phone_number):
-        if phone_number not in self.phone_numbers:
-            self.phone_numbers.append(phone_number)
-
-
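As a quick illustration of the PhoneNumber and VCard helpers above (a sketch only; it assumes the classes are importable, matching the path of the file being removed here), building a card and printing it yields the vCard 2.1 text documented later in generate_contact_list:

# Assumes an older checkout where this module still exists.
from antlion.test_utils.bt.bt_contacts_utils import PhoneNumber, VCard

card = VCard()
card.first_name = "Ada"
card.last_name = "Lovelace"
card.email = "ada@example.com"
card.add_phone_number(PhoneNumber("CELL", "+1-555-555-1234"))

# __str__ renders the N/FN, TEL, and EMAIL lines of a vCard 2.1 block.
print(card)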
-def generate_random_phone_number():
-    """Generate a random phone number/type
-    """
-    return PhoneNumber("CELL",
-                       "+{0:010d}".format(random.randint(0, 9999999999)))
-
-
-def generate_random_string(length=8,
-                           charset="{}{}{}".format(string.digits,
-                                                   string.ascii_letters,
-                                                   string.punctuation)):
-    """Generate a random string of specified length from the characterset
-    """
-    # Remove ; since that would make 2 words.
-    charset = charset.replace(";", "")
-    name = []
-    for i in range(length):
-        name.append(random.choice(charset))
-    return "".join(name)
-
-
-def generate_contact_list(destination_path,
-                          file_name,
-                          contact_count,
-                          phone_number_count=1):
-    """Generate a simple VCF file for count contacts with basic content.
-
-    An example with count = 1 and local_number = 2]
-
-    BEGIN:VCARD
-    VERSION:2.1
-    N:Person;1;;;
-    FN:1 Person
-    TEL;CELL:+1-555-555-1234
-    TEL;CELL:+1-555-555-4321
-    EMAIL;PREF:person1@gmail.com
-    END:VCARD
-    """
-    vcards = []
-    for i in range(contact_count):
-        current_contact = VCard()
-        current_contact.first_name = generate_random_string(
-            random.randint(1, 19))
-        current_contact.last_name = generate_random_string(
-            random.randint(1, 19))
-        current_contact.email = "{}{}@{}.{}".format(
-            current_contact.last_name, current_contact.first_name,
-            generate_random_string(random.randint(1, 19)),
-            generate_random_string(random.randint(1, 4)))
-        for number in range(phone_number_count):
-            current_contact.add_phone_number(generate_random_phone_number())
-        vcards.append(current_contact)
-    create_new_contacts_vcf_from_vcards(destination_path, file_name, vcards)
-
-
-def create_new_contacts_vcf_from_vcards(destination_path, vcf_file_name,
-                                        vcards):
-    """Create a new file with filename
-    """
-    contact_file = open("{}{}".format(destination_path, vcf_file_name), "w+")
-    for card in vcards:
-        contact_file.write(str(card))
-    contact_file.close()
-
-
-def get_contact_count(device):
-    """Returns the number of name:phone number pairs.
-    """
-    contact_list = device.droid.contactsQueryContent(
-        CONTACTS_URI, ["display_name", "data1"], "", [], "display_name")
-    return len(contact_list)
-
-
-def import_device_contacts_from_vcf(device, destination_path, vcf_file, timeout=10):
-    """Uploads and import vcf file to device.
-    """
-    number_count = phone_number_count(destination_path, vcf_file)
-    device.log.info("Trying to add {} phone numbers.".format(number_count))
-    local_phonebook_path = "{}{}".format(destination_path, vcf_file)
-    phone_phonebook_path = "{}{}".format(STORAGE_PATH, vcf_file)
-    device.adb.push("{} {}".format(local_phonebook_path, phone_phonebook_path))
-    device.droid.importVcf("file://{}{}".format(STORAGE_PATH, vcf_file))
-    start_time = time.time()
-    while time.time() < start_time + timeout:
-        #TODO: use unattended way to bypass contact import module instead of keyevent
-        if "ImportVCardActivity" in device.get_my_current_focus_window():
-            # keyevent to allow contacts import from vcf file
-            for key in ["DPAD_RIGHT", "DPAD_RIGHT", "ENTER"]:
-                device.adb.shell("input keyevent KEYCODE_{}".format(key))
-            break
-        time.sleep(1)
-    if wait_for_phone_number_update_complete(device, number_count):
-        return number_count
-    else:
-        return 0
-
-
-def export_device_contacts_to_vcf(device, destination_path, vcf_file):
-    """Export and download vcf file from device.
-    """
-    path_on_phone = "{}{}".format(STORAGE_PATH, vcf_file)
-    device.droid.exportVcf("{}".format(path_on_phone))
-    # Download and then remove file from device
-    device.adb.pull("{} {}".format(path_on_phone, destination_path))
-    return True
-
-
-def delete_vcf_files(device):
-    """Deletes all files with .vcf extension
-    """
-    files = device.adb.shell("ls {}".format(STORAGE_PATH))
-    for file_name in files.split():
-        if ".vcf" in file_name:
-            device.adb.shell("rm -f {}{}".format(STORAGE_PATH, file_name))
-
-
-def erase_contacts(device):
-    """Erase all contacts out of devices contact database.
-    """
-    device.log.info("Erasing contacts.")
-    if get_contact_count(device) > 0:
-        device.droid.contactsEraseAll()
-        try:
-            device.ed.pop_event(CONTACTS_ERASED_CALLBACK, PBAP_SYNC_TIME)
-        except queue.Empty:
-            log.error("Phone book not empty.")
-            return False
-    return True
-
-
-def wait_for_phone_number_update_complete(device, expected_count):
-    """Check phone_number count on device and wait for updates until it has the
-    expected number of phone numbers in its contact database.
-    """
-    update_completed = True
-    try:
-        while (expected_count != get_contact_count(device) and
-               device.ed.pop_event(CONTACTS_CHANGED_CALLBACK, PBAP_SYNC_TIME)):
-            pass
-    except queue.Empty:
-        log.error("Contacts failed to update.")
-        update_completed = False
-    device.log.info("Found {} out of the expected {} contacts.".format(
-        get_contact_count(device), expected_count))
-    return update_completed
-
-
-def wait_for_call_log_update_complete(device, expected_count):
-    """Check call log count on device and wait for updates until it has the
-    expected number of calls in its call log database.
-    """
-    update_completed = True
-    try:
-        while (expected_count != device.droid.callLogGetCount() and
-               device.ed.pop_event(CALL_LOG_CHANGED, PBAP_SYNC_TIME)):
-            pass
-    except queue.Empty:
-        log.error("Call Log failed to update.")
-        update_completed = False
-    device.log.info("Found {} out of the expected {} call logs.".format(
-        device.droid.callLogGetCount(), expected_count))
-    return update_completed
-
-
-def add_call_log(device, call_log_type, phone_number, call_time):
-    """Add call number and time to specified log.
-    """
-    new_call_log = {}
-    new_call_log["type"] = str(call_log_type)
-    new_call_log["number"] = phone_number
-    new_call_log["time"] = str(call_time)
-    device.droid.callLogsPut(new_call_log)
-
-
-def get_and_compare_call_logs(pse, pce, call_log_type):
-    """Gather and compare call logs from PSE and PCE for the specified type.
-    """
-    pse_call_log = pse.droid.callLogsGet(call_log_type)
-    pce_call_log = pce.droid.callLogsGet(call_log_type)
-    return compare_call_logs(pse_call_log, pce_call_log)
-
-
-def normalize_phonenumber(phone_number):
-    """Remove all non-digits from phone_number
-    """
-    return re.sub(r"\D", "", phone_number)
-
-
-def compare_call_logs(pse_call_log, pce_call_log):
-    """Gather and compare call logs from PSE and PCE for the specified type.
-    """
-    call_logs_match = True
-    if len(pse_call_log) == len(pce_call_log):
-        for i in range(len(pse_call_log)):
-            # Compare the phone number
-            if (normalize_phonenumber(pse_call_log[i]["number"]) !=
-                    normalize_phonenumber(pce_call_log[i]["number"])):
-                log.warning("Call Log numbers differ")
-                call_logs_match = False
-
-            # Compare which log it was taken from (Incoming, Outgoing, Missed).
-            if pse_call_log[i]["type"] != pce_call_log[i]["type"]:
-                log.warning("Call Log types differ")
-                call_logs_match = False
-
-            # Compare time to truncated second.
-            if int(pse_call_log[i]["date"]) // 1000 != int(pce_call_log[i][
-                    "date"]) // 1000:
-                log.warning("Call log times don't match, check timezone.")
-                call_logs_match = False
-
-    else:
-        log.warning("Call Log lengths differ {}:{}".format(
-            len(pse_call_log), len(pce_call_log)))
-        call_logs_match = False
-
-    if not call_logs_match:
-        log.info("PSE Call Log:")
-        log.info(pse_call_log)
-        log.info("PCE Call Log:")
-        log.info(pce_call_log)
-
-    return call_logs_match
-
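A minimal sketch of how the call-log helpers above can be combined in a PBAP check, assuming `pse` and `pce` are SL4A-enabled AndroidDevice objects and that these helpers are available from this module; the wrapper name `verify_call_log_sync`, the sample number, and the millisecond timestamp convention are illustrative assumptions.

    import time

    def verify_call_log_sync(pse, pce, call_log_type, number="5551234567"):
        # Add a synthetic entry to the PSE call log (the timestamp is assumed
        # to be milliseconds since the epoch, matching the // 1000 comparison).
        add_call_log(pse, call_log_type, number, int(time.time() * 1000))
        # Wait for the PCE call log to catch up with the PSE before comparing.
        expected = pse.droid.callLogGetCount()
        if not wait_for_call_log_update_complete(pce, expected):
            return False
        # Entry-by-entry comparison of number, type, and truncated timestamp.
        return get_and_compare_call_logs(pse, pce, call_log_type)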
diff --git a/src/antlion/test_utils/bt/bt_factory.py b/src/antlion/test_utils/bt/bt_factory.py
deleted file mode 100644
index e085848..0000000
--- a/src/antlion/test_utils/bt/bt_factory.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-
-
-def create(configs):
-    """Used to create instance of bt implementation.
-
-    A list of configurations is extracted from configs. Each module name is
-    extracted and passed to importlib.import_module to load the specific
-    implementation, which is then appended to a device list.
-    Args:
-        configs: A configurations dictionary that contains
-        a list of configs for each device in configs['user_params']['BtDevice'].
-
-    Returns:
-        A list of bt implementations.
-    """
-    bt_devices = []
-    for config in configs:
-        bt_name = config['bt_module']
-        bt = importlib.import_module(
-            'antlion.test_utils.bt.bt_implementations.%s' % bt_name)
-        bt_devices.append(bt.BluethoothDevice(config))
-    return bt_devices
-
-
-def destroy(bt_device_list):
-    for bt in bt_device_list:
-        bt.close()
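A hedged sketch of the configuration shape this factory expects and how a test might drive it; the `bt_stub` module name matches the stub implementation under bt_implementations, while the extra config key and the calling pattern are illustrative assumptions.

    from antlion.test_utils.bt import bt_factory

    # Each entry names the implementation module to import, plus whatever
    # device-specific settings that implementation needs.
    bt_device_configs = [
        {"bt_module": "bt_stub", "label": "desk-headset"},  # "label" is illustrative
    ]

    devices = bt_factory.create(bt_device_configs)
    try:
        for device in devices:
            device.connect(None)  # the stub only prompts the operator
            assert device.is_bt_connected()
    finally:
        bt_factory.destroy(devices)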
diff --git a/src/antlion/test_utils/bt/bt_gatt_utils.py b/src/antlion/test_utils/bt/bt_gatt_utils.py
deleted file mode 100644
index 0347d26..0000000
--- a/src/antlion/test_utils/bt/bt_gatt_utils.py
+++ /dev/null
@@ -1,418 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.test_utils.bt.bt_test_utils import BtTestUtilsError
-from antlion.test_utils.bt.bt_test_utils import get_mac_address_of_generic_advertisement
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_connection_state
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_phy_mask
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_transport
-import pprint
-from queue import Empty
-
-default_timeout = 10
-log = logging
-
-
-class GattTestUtilsError(Exception):
-    pass
-
-
-def setup_gatt_connection(cen_ad,
-                          mac_address,
-                          autoconnect,
-                          transport=gatt_transport['auto'],
-                          opportunistic=False):
-    gatt_callback = cen_ad.droid.gattCreateGattCallback()
-    log.info("Gatt Connect to mac address {}.".format(mac_address))
-    bluetooth_gatt = cen_ad.droid.gattClientConnectGatt(
-        gatt_callback, mac_address, autoconnect, transport, opportunistic,
-        gatt_phy_mask['1m_mask'])
-    expected_event = gatt_cb_strings['gatt_conn_change'].format(gatt_callback)
-    try:
-        event = cen_ad.ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        close_gatt_client(cen_ad, bluetooth_gatt)
-        raise GattTestUtilsError(
-            "Could not establish a connection to "
-            "peripheral. Expected event: {}".format(expected_event))
-    if event['data']['State'] != gatt_connection_state['connected']:
-        close_gatt_client(cen_ad, bluetooth_gatt)
-        try:
-            cen_ad.droid.gattClientClose(bluetooth_gatt)
-        except Exception:
-            log.debug("Failed to close gatt client.")
-        raise GattTestUtilsError("Could not establish a connection to "
-                                 "peripheral. Event Details: {}".format(
-                                     pprint.pformat(event)))
-    return bluetooth_gatt, gatt_callback
-
-
-def close_gatt_client(cen_ad, bluetooth_gatt):
-    try:
-        cen_ad.droid.gattClientClose(bluetooth_gatt)
-    except Exception:
-        log.debug("Failed to close gatt client.")
-
-
-def disconnect_gatt_connection(cen_ad, bluetooth_gatt, gatt_callback):
-    cen_ad.droid.gattClientDisconnect(bluetooth_gatt)
-    wait_for_gatt_disconnect_event(cen_ad, gatt_callback)
-    return
-
-
-def wait_for_gatt_disconnect_event(cen_ad, gatt_callback):
-    expected_event = gatt_cb_strings['gatt_conn_change'].format(gatt_callback)
-    try:
-        event = cen_ad.ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        raise GattTestUtilsError(
-            gatt_cb_err['gatt_conn_change_err'].format(expected_event))
-    found_state = event['data']['State']
-    expected_state = gatt_connection_state['disconnected']
-    if found_state != expected_state:
-        raise GattTestUtilsError(
-            "GATT connection state change expected {}, found {}".format(
-                expected_state, found_state))
-    return
-
-
-def orchestrate_gatt_connection(cen_ad,
-                                per_ad,
-                                transport=gatt_transport['le'],
-                                mac_address=None,
-                                autoconnect=False,
-                                opportunistic=False):
-    adv_callback = None
-    if mac_address is None:
-        if transport == gatt_transport['le']:
-            try:
-                mac_address, adv_callback, scan_callback = (
-                    get_mac_address_of_generic_advertisement(cen_ad, per_ad))
-            except BtTestUtilsError as err:
-                raise GattTestUtilsError(
-                    "Error in getting mac address: {}".format(err))
-        else:
-            mac_address = per_ad.droid.bluetoothGetLocalAddress()
-            adv_callback = None
-    bluetooth_gatt, gatt_callback = setup_gatt_connection(
-        cen_ad, mac_address, autoconnect, transport, opportunistic)
-    return bluetooth_gatt, gatt_callback, adv_callback
-
-
-def run_continuous_write_descriptor(cen_droid,
-                                    cen_ed,
-                                    per_droid,
-                                    per_ed,
-                                    gatt_server,
-                                    gatt_server_callback,
-                                    bluetooth_gatt,
-                                    services_count,
-                                    discovered_services_index,
-                                    number_of_iterations=100000):
-    log.info("Starting continuous write")
-    bt_device_id = 0
-    status = 1
-    offset = 1
-    test_value = [1, 2, 3, 4, 5, 6, 7]
-    test_value_return = [1, 2, 3]
-    for _ in range(number_of_iterations):
-        try:
-            for i in range(services_count):
-                characteristic_uuids = (
-                    cen_droid.gattClientGetDiscoveredCharacteristicUuids(
-                        discovered_services_index, i))
-                log.info(characteristic_uuids)
-                for characteristic in characteristic_uuids:
-                    descriptor_uuids = (
-                        cen_droid.gattClientGetDiscoveredDescriptorUuids(
-                            discovered_services_index, i, characteristic))
-                    log.info(descriptor_uuids)
-                    for descriptor in descriptor_uuids:
-                        cen_droid.gattClientDescriptorSetValue(
-                            bluetooth_gatt, discovered_services_index, i,
-                            characteristic, descriptor, test_value)
-                        cen_droid.gattClientWriteDescriptor(
-                            bluetooth_gatt, discovered_services_index, i,
-                            characteristic, descriptor)
-                        expected_event = gatt_cb_strings[
-                            'desc_write_req'].format(gatt_server_callback)
-                        try:
-                            event = per_ed.pop_event(expected_event,
-                                                     default_timeout)
-                        except Empty:
-                            log.error(gatt_cb_err['desc_write_req_err'].format(
-                                expected_event))
-                            return False
-                        request_id = event['data']['requestId']
-                        found_value = event['data']['value']
-                        if found_value != test_value:
-                            log.error(
-                                "Values didn't match. Found: {}, Expected: "
-                                "{}".format(found_value, test_value))
-                        per_droid.gattServerSendResponse(
-                            gatt_server, bt_device_id, request_id, status,
-                            offset, test_value_return)
-                        expected_event = gatt_cb_strings['desc_write'].format(
-                            bluetooth_gatt)
-                        try:
-                            cen_ed.pop_event(expected_event, default_timeout)
-                        except Empty:
-                            log.error(gatt_cb_err['desc_write_err'].format(
-                                expected_event))
-                            raise Exception("Thread ended prematurely.")
-        except Exception as err:
-            log.error("Continuing but found exception: {}".format(err))
-
-
-def setup_characteristics_and_descriptors(droid):
-    characteristic_input = [
-        {
-            'uuid':
-            "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-            'property':
-            gatt_characteristic['property_write']
-            | gatt_characteristic['property_write_no_response'],
-            'permission':
-            gatt_characteristic['permission_write']
-        },
-        {
-            'uuid':
-            "21c0a0bf-ad51-4a2d-8124-b74003e4e8c8",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-        {
-            'uuid':
-            "6774191f-6ec3-4aa2-b8a8-cf830e41fda6",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-    ]
-    descriptor_input = [{
-        'uuid':
-        "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_descriptor['permission_write'],
-    }, {
-        'uuid':
-        "76d5ed92-ca81-4edb-bb6b-9f019665fb32",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_characteristic['permission_write'],
-    }]
-    characteristic_list = setup_gatt_characteristics(droid,
-                                                     characteristic_input)
-    descriptor_list = setup_gatt_descriptors(droid, descriptor_input)
-    return characteristic_list, descriptor_list
-
-
-def setup_multiple_services(per_ad):
-    per_droid, per_ed = per_ad.droid, per_ad.ed
-    gatt_server_callback = per_droid.gattServerCreateGattServerCallback()
-    gatt_server = per_droid.gattServerOpenGattServer(gatt_server_callback)
-    characteristic_list, descriptor_list = (
-        setup_characteristics_and_descriptors(per_droid))
-    per_droid.gattServerCharacteristicAddDescriptor(characteristic_list[1],
-                                                    descriptor_list[0])
-    per_droid.gattServerCharacteristicAddDescriptor(characteristic_list[2],
-                                                    descriptor_list[1])
-    gattService = per_droid.gattServerCreateService(
-        "00000000-0000-1000-8000-00805f9b34fb", gatt_service_types['primary'])
-    gattService2 = per_droid.gattServerCreateService(
-        "FFFFFFFF-0000-1000-8000-00805f9b34fb", gatt_service_types['primary'])
-    gattService3 = per_droid.gattServerCreateService(
-        "3846D7A0-69C8-11E4-BA00-0002A5D5C51B", gatt_service_types['primary'])
-    for characteristic in characteristic_list:
-        per_droid.gattServerAddCharacteristicToService(gattService,
-                                                       characteristic)
-    per_droid.gattServerAddService(gatt_server, gattService)
-    expected_event = gatt_cb_strings['serv_added'].format(gatt_server_callback)
-    try:
-        per_ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        per_ad.droid.gattServerClose(gatt_server)
-        raise GattTestUtilsError(
-            gatt_cb_strings['serv_added_err'].format(expected_event))
-    for characteristic in characteristic_list:
-        per_droid.gattServerAddCharacteristicToService(gattService2,
-                                                       characteristic)
-    per_droid.gattServerAddService(gatt_server, gattService2)
-    try:
-        per_ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        per_ad.droid.gattServerClose(gatt_server)
-        raise GattTestUtilsError(
-            gatt_cb_strings['serv_added_err'].format(expected_event))
-    for characteristic in characteristic_list:
-        per_droid.gattServerAddCharacteristicToService(gattService3,
-                                                       characteristic)
-    per_droid.gattServerAddService(gatt_server, gattService3)
-    try:
-        per_ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        per_ad.droid.gattServerClose(gatt_server)
-        raise GattTestUtilsError(
-            gatt_cb_strings['serv_added_err'].format(expected_event))
-    return gatt_server_callback, gatt_server
-
-
-def setup_characteristics_and_descriptors(droid):
-    characteristic_input = [
-        {
-            'uuid':
-            "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-            'property':
-            gatt_characteristic['property_write']
-            | gatt_characteristic['property_write_no_response'],
-            'permission':
-            gatt_characteristic['property_write']
-        },
-        {
-            'uuid':
-            "21c0a0bf-ad51-4a2d-8124-b74003e4e8c8",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-        {
-            'uuid':
-            "6774191f-6ec3-4aa2-b8a8-cf830e41fda6",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-    ]
-    descriptor_input = [{
-        'uuid':
-        "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_descriptor['permission_write'],
-    }, {
-        'uuid':
-        "76d5ed92-ca81-4edb-bb6b-9f019665fb32",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_characteristic['permission_write'],
-    }]
-    characteristic_list = setup_gatt_characteristics(droid,
-                                                     characteristic_input)
-    descriptor_list = setup_gatt_descriptors(droid, descriptor_input)
-    return characteristic_list, descriptor_list
-
-
-def setup_gatt_characteristics(droid, input):
-    characteristic_list = []
-    for item in input:
-        index = droid.gattServerCreateBluetoothGattCharacteristic(
-            item['uuid'], item['property'], item['permission'])
-        characteristic_list.append(index)
-    return characteristic_list
-
-
-def setup_gatt_descriptors(droid, input):
-    descriptor_list = []
-    for item in input:
-        index = droid.gattServerCreateBluetoothGattDescriptor(
-            item['uuid'],
-            item['property'],
-        )
-        descriptor_list.append(index)
-    log.info("setup descriptor list: {}".format(descriptor_list))
-    return descriptor_list
-
-
-def setup_gatt_mtu(cen_ad, bluetooth_gatt, gatt_callback, mtu):
-    """utility function to set mtu for GATT connection.
-
-    Steps:
-    1. Request mtu change.
-    2. Check if the mtu is changed to the new value
-
-    Args:
-        cen_ad: test device for client to scan.
-        bluetooth_gatt: GATT object
-        mtu: new mtu value to be set
-
-    Returns:
-        If success, return True.
-        if fail, return False
-    """
-    cen_ad.droid.gattClientRequestMtu(bluetooth_gatt, mtu)
-    expected_event = gatt_cb_strings['mtu_changed'].format(gatt_callback)
-    try:
-        mtu_event = cen_ad.ed.pop_event(expected_event, default_timeout)
-        mtu_size_found = mtu_event['data']['MTU']
-        if mtu_size_found != mtu:
-            log.error("MTU size found: {}, expected: {}".format(
-                mtu_size_found, mtu))
-            return False
-    except Empty:
-        log.error(gatt_cb_err['mtu_changed_err'].format(expected_event))
-        return False
-    return True
-
-
-def log_gatt_server_uuids(cen_ad,
-                          discovered_services_index,
-                          bluetooth_gatt=None):
-    services_count = cen_ad.droid.gattClientGetDiscoveredServicesCount(
-        discovered_services_index)
-    for i in range(services_count):
-        service = cen_ad.droid.gattClientGetDiscoveredServiceUuid(
-            discovered_services_index, i)
-        log.info("Discovered service uuid {}".format(service))
-        characteristic_uuids = (
-            cen_ad.droid.gattClientGetDiscoveredCharacteristicUuids(
-                discovered_services_index, i))
-        for j in range(len(characteristic_uuids)):
-            descriptor_uuids = (
-                cen_ad.droid.gattClientGetDiscoveredDescriptorUuidsByIndex(
-                    discovered_services_index, i, j))
-            if bluetooth_gatt:
-                char_inst_id = cen_ad.droid.gattClientGetCharacteristicInstanceId(
-                    bluetooth_gatt, discovered_services_index, i, j)
-                log.info("Discovered characteristic handle uuid: {} {}".format(
-                    hex(char_inst_id), characteristic_uuids[j]))
-                for k in range(len(descriptor_uuids)):
-                    desc_inst_id = cen_ad.droid.gattClientGetDescriptorInstanceId(
-                        bluetooth_gatt, discovered_services_index, i, j, k)
-                    log.info("Discovered descriptor handle uuid: {} {}".format(
-                        hex(desc_inst_id), descriptor_uuids[k]))
-            else:
-                log.info("Discovered characteristic uuid: {}".format(
-                    characteristic_uuids[j]))
-                for k in range(len(descriptor_uuids)):
-                    log.info("Discovered descriptor uuid {}".format(
-                        descriptor_uuids[k]))
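A minimal sketch tying these GATT helpers together for a central/peripheral pair, assuming `cen_ad` and `per_ad` are SL4A-enabled AndroidDevice objects; the wrapper name and the requested MTU of 247 are illustrative assumptions.

    from antlion.test_utils.bt.bt_gatt_utils import (
        disconnect_gatt_connection,
        orchestrate_gatt_connection,
        setup_gatt_mtu,
    )

    def gatt_connect_with_mtu(cen_ad, per_ad, mtu=247):
        # Advertise from the peripheral, resolve its address, and connect.
        bluetooth_gatt, gatt_callback, adv_callback = (
            orchestrate_gatt_connection(cen_ad, per_ad))
        try:
            # Negotiate a larger MTU before exchanging data.
            return setup_gatt_mtu(cen_ad, bluetooth_gatt, gatt_callback, mtu)
        finally:
            disconnect_gatt_connection(cen_ad, bluetooth_gatt, gatt_callback)
            if adv_callback is not None:
                # Stop the advertisement started by orchestrate_gatt_connection.
                per_ad.droid.bleStopBleAdvertising(adv_callback)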
diff --git a/src/antlion/test_utils/bt/bt_implementations/bt_stub.py b/src/antlion/test_utils/bt/bt_implementations/bt_stub.py
deleted file mode 100644
index 7166379..0000000
--- a/src/antlion/test_utils/bt/bt_implementations/bt_stub.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A stub implementation of a DUT interface.
-
-This is a stub interface that allows automated tests to run without
-automating the hardware. It exists for two reasons: first, as an example of
-how to write a DUT implementation, and second, as an implementation that
-can be used to exercise a test case without writing out the full
-implementation.
-"""
-
-import logging
-
-class BluethoothDevice:
-    """The api interface used in the test for the stub.
-
-    This is interface which defines all the functions that can be
-    called by the bt test suite.
-    """
-
-    def __init__(self, config):
-        print('Init Stub with ', config)
-        logging.info('Init Stub with '+str(config))
-
-    def answer_phone(self):
-        input('Answer the phone and then press enter\n')
-
-    def hang_up(self):
-        input('Hang up the phone and then press enter\n')
-
-    def toggle_pause(self):
-        input('Press pause on device then press enter\n')
-
-    def volume(self, direction):
-        """Adjust the volume specified by the value of direction.
-
-        Args:
-            direction: A string that is either UP or DOWN
-            that indicates which way to adjust the volume.
-        """
-
-        return input('move volume '+direction+' and then press enter\n')
-
-    def connect(self, android):
-        input('Connect device and press enter\n')
-
-    def is_bt_connected(self):
-        con = input('Is device connected? y/n').lower()
-        while con not in ['y', 'n']:
-            con = input('Is device connected? y/n').lower()
-        return con == 'y'
-
-    def close(self):
-        """This where the hardware is released.
-        """
-        print('Close Stub')
-        logging.info('Close Stub')
-
diff --git a/src/antlion/test_utils/bt/bt_metrics_utils.py b/src/antlion/test_utils/bt/bt_metrics_utils.py
deleted file mode 100644
index 9464737..0000000
--- a/src/antlion/test_utils/bt/bt_metrics_utils.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-def get_bluetooth_profile_connection_stats_map(bluetooth_log):
-    return project_pairs_list_to_map(bluetooth_log.profile_connection_stats,
-                                     lambda stats : stats.profile_id,
-                                     lambda stats : stats.num_times_connected,
-                                     lambda a, b : a + b)
-
-def get_bluetooth_headset_profile_connection_stats_map(bluetooth_log):
-    return project_pairs_list_to_map(bluetooth_log.headset_profile_connection_stats,
-                                     lambda stats : stats.profile_id,
-                                     lambda stats : stats.num_times_connected,
-                                     lambda a, b : a + b)
-
-def project_pairs_list_to_map(pairs_list, get_key, get_value, merge_value):
-    """
-    Project a list of pairs (A, B) into a map of [A] --> B
-    :param pairs_list:  list of pairs (A, B)
-    :param get_key: function used to get key from pair (A, B)
-    :param get_value: function used to get value from pair (A, B)
-    :param merge_value: function used to merge values of B
-    :return: a map of [A] --> B
-    """
-    result = {}
-    for item in pairs_list:
-        my_key = get_key(item)
-        if my_key in result:
-            result[my_key] = merge_value(result[my_key], get_value(item))
-        else:
-            result[my_key] = get_value(item)
-    return result
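A small worked example of the projection helper, independent of the Bluetooth metrics proto; the namedtuple stand-ins are purely illustrative.

    from collections import namedtuple

    from antlion.test_utils.bt.bt_metrics_utils import project_pairs_list_to_map

    Stat = namedtuple("Stat", ["profile_id", "num_times_connected"])
    stats = [Stat(1, 2), Stat(2, 5), Stat(1, 3)]

    merged = project_pairs_list_to_map(stats,
                                       lambda s: s.profile_id,
                                       lambda s: s.num_times_connected,
                                       lambda a, b: a + b)
    # Entries sharing a key are combined with the merge function:
    # merged == {1: 5, 2: 5}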
diff --git a/src/antlion/test_utils/bt/bt_power_test_utils.py b/src/antlion/test_utils/bt/bt_power_test_utils.py
deleted file mode 100644
index f76bd6b..0000000
--- a/src/antlion/test_utils/bt/bt_power_test_utils.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import antlion.test_utils.bt.BleEnum as bleenum
-import antlion.test_utils.instrumentation.device.command.instrumentation_command_builder as icb
-
-BLE_LOCATION_SCAN_ENABLE = 'settings put global ble_scan_always_enabled 1'
-BLE_LOCATION_SCAN_DISABLE = 'settings put global ble_scan_always_enabled 0'
-START_BLE_ADV = 'am start -n com.google.bletesting/.ActsCommandExecutor --es command ADVERTISE#2#2#30000'
-START_BLE_SCAN = 'am start -n com.google.bletesting/.ActsCommandExecutor --es command SCAN#2#10000'
-SCAN_DURATION = 10
-SCREEN_WAIT_TIME = 1
-
-
-class MediaControl(object):
-    """Media control using adb shell for power testing.
-
-    Object to control media play status using adb.
-    """
-    def __init__(self, android_device, music_file):
-        """Initialize the media_control class.
-
-        Args:
-            android_dut: android_device object
-            music_file: location of the music file
-        """
-        self.android_device = android_device
-        self.music_file = music_file
-
-    def player_on_foreground(self):
-        """Turn on screen and make sure media play is on foreground
-
-        All media control keycode only works when screen is on and media player
-        is on the foreground. Turn off screen first and turn it on to make sure
-        all operation is based on the same screen status. Otherwise, 'MENU' key
-        would block command to be sent.
-        """
-        self.android_device.droid.goToSleepNow()
-        time.sleep(SCREEN_WAIT_TIME)
-        self.android_device.droid.wakeUpNow()
-        time.sleep(SCREEN_WAIT_TIME)
-        self.android_device.send_keycode('MENU')
-        time.sleep(SCREEN_WAIT_TIME)
-
-    def play(self):
-        """Start playing music.
-
-        """
-        self.player_on_foreground()
-        PLAY = 'am start -a android.intent.action.VIEW -d file://{} -t audio/wav'.format(
-            self.music_file)
-        self.android_device.adb.shell(PLAY)
-
-    def pause(self):
-        """Pause music.
-
-        """
-        self.player_on_foreground()
-        self.android_device.send_keycode('MEDIA_PAUSE')
-
-    def resume(self):
-        """Pause music.
-
-        """
-        self.player_on_foreground()
-        self.android_device.send_keycode('MEDIA_PLAY')
-
-    def stop(self):
-        """Stop music and close media play.
-
-        """
-        self.player_on_foreground()
-        self.android_device.send_keycode('MEDIA_STOP')
-
-
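A short usage sketch for MediaControl, assuming `dut` is an SL4A-enabled AndroidDevice and the WAV file has already been pushed to the device; the path and sleep durations are illustrative assumptions.

    import time

    from antlion.test_utils.bt.bt_power_test_utils import MediaControl

    music = MediaControl(dut, "/sdcard/Music/test_tone.wav")
    music.play()       # bring the player to the foreground and start playback
    time.sleep(30)     # e.g. measure A2DP streaming power during this window
    music.pause()
    time.sleep(5)
    music.resume()
    time.sleep(5)
    music.stop()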
-def start_apk_ble_adv(dut, adv_mode, adv_power_level, adv_duration):
-    """Trigger BLE advertisement from power-test.apk.
-
-    Args:
-        dut: Android device under test, type AndroidDevice obj
-        adv_mode: The BLE advertisement mode.
-            {0: 'LowPower', 1: 'Balanced', 2: 'LowLatency'}
-        adv_power_level: The BLE advertisement TX power level.
-            {0: 'UltraLowTXPower', 1: 'LowTXPower', 2: 'MediumTXPower',
-            3: 'HighTXPower'}
-        adv_duration: duration of advertisement in seconds, type int
-    """
-
-    adv_duration = str(adv_duration) + 's'
-    builder = icb.InstrumentationTestCommandBuilder.default()
-    builder.add_test_class(
-        "com.google.android.device.power.tests.ble.BleAdvertise")
-    builder.set_manifest_package("com.google.android.device.power")
-    builder.set_runner("androidx.test.runner.AndroidJUnitRunner")
-    builder.add_key_value_param("cool-off-duration", "0s")
-    builder.add_key_value_param("idle-duration", "0s")
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.ADVERTISE_MODE", adv_mode)
-    builder.add_key_value_param("com.android.test.power.receiver.POWER_LEVEL",
-                                adv_power_level)
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.ADVERTISING_DURATION", adv_duration)
-
-    adv_command = builder.build() + ' &'
-    logging.info('Start BLE {} at {} for {} seconds'.format(
-        bleenum.AdvertiseSettingsAdvertiseMode(adv_mode).name,
-        bleenum.AdvertiseSettingsAdvertiseTxPower(adv_power_level).name,
-        adv_duration))
-    dut.adb.shell_nb(adv_command)
-
-
-def start_apk_ble_scan(dut, scan_mode, scan_duration):
-    """Build the command to trigger BLE scan from power-test.apk.
-
-    Args:
-        dut: Android device under test, type AndroidDevice obj
-        scan_mode: The BLE scan mode.
-            {0: 'LowPower', 1: 'Balanced', 2: 'LowLatency', -1: 'Opportunistic'}
-        scan_duration: duration of scan in seconds, type int
-    """
-    scan_duration = str(scan_duration) + 's'
-    builder = icb.InstrumentationTestCommandBuilder.default()
-    builder.set_proto_path()
-    builder.add_flag('--no-isolated-storage')
-    builder.add_test_class("com.google.android.device.power.tests.ble.BleScan")
-    builder.set_manifest_package("com.google.android.device.power")
-    builder.set_runner("androidx.test.runner.AndroidJUnitRunner")
-    builder.add_key_value_param("cool-off-duration", "0s")
-    builder.add_key_value_param("idle-duration", "0s")
-    builder.add_key_value_param("com.android.test.power.receiver.SCAN_MODE",
-                                scan_mode)
-    builder.add_key_value_param("com.android.test.power.receiver.MATCH_MODE",
-                                2)
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.SCAN_DURATION", scan_duration)
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.CALLBACK_TYPE", 1)
-    builder.add_key_value_param("com.android.test.power.receiver.FILTER",
-                                'true')
-
-    scan_command = builder.build() + ' &'
-    logging.info('Start BLE {} scans for {} seconds'.format(
-        bleenum.ScanSettingsScanMode(scan_mode).name, scan_duration))
-    dut.adb.shell_nb(scan_command)
-
-
-def establish_ble_connection(client_ad, server_ad, con_priority):
-    """Establish BLE connection using BLE_Test.apk.
-
-    Args:
-        client_ad: the Android device performing the BLE connection.
-        server_ad: the Android device accepting the BLE connection.
-        con_priority: The BLE Connection Priority.
-            {0: 'BALANCED' = Use the connection parameters recommended by the Bluetooth SIG,
-            1: 'HIGH' = Request a high priority, low latency connection,
-            2: 'LOW_POWER' = Request low power, reduced data rate connection parameters}
-    """
-    server_ad.adb.shell(START_BLE_ADV)
-    time.sleep(5)
-    client_ad.adb.shell(START_BLE_SCAN)
-    time.sleep(SCAN_DURATION)
-    logging.info("Connection Priority is:{}".format(con_priority))
-    client_ad.adb.shell(
-        'am start -n com.google.bletesting/.ActsCommandExecutor '
-        '--es command GATTCONNECT#{}'.format(con_priority))
-    logging.info(
-        "BLE Connection Successful with Connection Priority:{}".format(
-            con_priority))
diff --git a/src/antlion/test_utils/bt/bt_test_utils.py b/src/antlion/test_utils/bt/bt_test_utils.py
deleted file mode 100644
index 2152c82..0000000
--- a/src/antlion/test_utils/bt/bt_test_utils.py
+++ /dev/null
@@ -1,1922 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import random
-import re
-import string
-import threading
-import time
-try:
-    import pandas as pd
-except ModuleNotFoundError:
-    pass
-from queue import Empty
-from subprocess import call
-from antlion import asserts
-from antlion.test_utils.bt.bt_constants import adv_fail
-from antlion.test_utils.bt.bt_constants import adv_succ
-from antlion.test_utils.bt.bt_constants import batch_scan_not_supported_list
-from antlion.test_utils.bt.bt_constants import batch_scan_result
-from antlion.test_utils.bt.bt_constants import bits_per_samples
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_modes
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_tx_powers
-from antlion.test_utils.bt.bt_constants import bluetooth_a2dp_codec_config_changed
-from antlion.test_utils.bt.bt_constants import bluetooth_off
-from antlion.test_utils.bt.bt_constants import bluetooth_on
-from antlion.test_utils.bt.bt_constants import \
-    bluetooth_profile_connection_state_changed
-from antlion.test_utils.bt.bt_constants import bluetooth_socket_conn_test_uuid
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-from antlion.test_utils.bt.bt_constants import bt_profile_constants
-from antlion.test_utils.bt.bt_constants import bt_profile_states
-from antlion.test_utils.bt.bt_constants import bt_rfcomm_uuids
-from antlion.test_utils.bt.bt_constants import bt_scan_mode_types
-from antlion.test_utils.bt.bt_constants import btsnoop_last_log_path_on_device
-from antlion.test_utils.bt.bt_constants import btsnoop_log_path_on_device
-from antlion.test_utils.bt.bt_constants import channel_modes
-from antlion.test_utils.bt.bt_constants import codec_types
-from antlion.test_utils.bt.bt_constants import default_bluetooth_socket_timeout_ms
-from antlion.test_utils.bt.bt_constants import default_rfcomm_timeout_ms
-from antlion.test_utils.bt.bt_constants import hid_id_keyboard
-from antlion.test_utils.bt.bt_constants import pairing_variant_passkey_confirmation
-from antlion.test_utils.bt.bt_constants import pan_connect_timeout
-from antlion.test_utils.bt.bt_constants import sample_rates
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_constants import sig_uuid_constants
-from antlion.test_utils.bt.bt_constants import small_timeout
-from antlion.utils import exe_cmd
-
-from antlion import utils
-
-log = logging
-
-advertisements_to_devices = {}
-
-
-class BtTestUtilsError(Exception):
-    pass
-
-
-def _add_android_device_to_dictionary(android_device, profile_list,
-                                      selector_dict):
-    """Adds the AndroidDevice and supported features to the selector dictionary
-
-    Args:
-        android_device: The Android device.
-        profile_list: The list of profiles the Android device supports.
-        selector_dict: The profile-to-devices dictionary to update.
-    """
-    for profile in profile_list:
-        if profile in selector_dict:
-            if android_device not in selector_dict[profile]:
-                selector_dict[profile].append(android_device)
-        else:
-            selector_dict[profile] = [android_device]
-
-
-def bluetooth_enabled_check(ad, timeout_sec=5):
-    """Checks if the Bluetooth state is enabled, if not it will attempt to
-    enable it.
-
-    Args:
-        ad: The Android device to enable Bluetooth on.
-        timeout_sec: number of seconds to wait for toggle to take effect.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    if not ad.droid.bluetoothCheckState():
-        ad.droid.bluetoothToggleState(True)
-        expected_bluetooth_on_event_name = bluetooth_on
-        try:
-            ad.ed.pop_event(expected_bluetooth_on_event_name,
-                            bt_default_timeout)
-        except Empty:
-            ad.log.info("Failed to toggle Bluetooth on(no broadcast received).")
-            # Try one more time to poke at the actual state.
-            if ad.droid.bluetoothCheckState():
-                ad.log.info(".. actual state is ON")
-                return True
-            ad.log.error(".. actual state is OFF")
-            return False
-    end_time = time.time() + timeout_sec
-    while not ad.droid.bluetoothCheckState() and time.time() < end_time:
-        time.sleep(1)
-    return ad.droid.bluetoothCheckState()
-
-
-def check_device_supported_profiles(droid):
-    """Checks for Android device supported profiles.
-
-    Args:
-        droid: The droid object to query.
-
-    Returns:
-        A dictionary of supported profiles.
-    """
-    profile_dict = {}
-    profile_dict['hid'] = droid.bluetoothHidIsReady()
-    profile_dict['hsp'] = droid.bluetoothHspIsReady()
-    profile_dict['a2dp'] = droid.bluetoothA2dpIsReady()
-    profile_dict['avrcp'] = droid.bluetoothAvrcpIsReady()
-    profile_dict['a2dp_sink'] = droid.bluetoothA2dpSinkIsReady()
-    profile_dict['hfp_client'] = droid.bluetoothHfpClientIsReady()
-    profile_dict['pbap_client'] = droid.bluetoothPbapClientIsReady()
-    return profile_dict
-
-
-def cleanup_scanners_and_advertisers(scn_android_device, scn_callback_list,
-                                     adv_android_device, adv_callback_list):
-    """Try to gracefully stop all scanning and advertising instances.
-
-    Args:
-        scn_android_device: The Android device that is actively scanning.
-        scn_callback_list: The scan callback id list that needs to be stopped.
-        adv_android_device: The Android device that is actively advertising.
-        adv_callback_list: The advertise callback id list that needs to be
-            stopped.
-    """
-    scan_droid, scan_ed = scn_android_device.droid, scn_android_device.ed
-    adv_droid = adv_android_device.droid
-    try:
-        for scan_callback in scn_callback_list:
-            scan_droid.bleStopBleScan(scan_callback)
-    except Exception as err:
-        scn_android_device.log.debug(
-            "Failed to stop LE scan... reseting Bluetooth. Error {}".format(
-                err))
-        reset_bluetooth([scn_android_device])
-    try:
-        for adv_callback in adv_callback_list:
-            adv_droid.bleStopBleAdvertising(adv_callback)
-    except Exception as err:
-        adv_android_device.log.debug(
-            "Failed to stop LE advertisement... reseting Bluetooth. Error {}".
-            format(err))
-        reset_bluetooth([adv_android_device])
-
-
-def clear_bonded_devices(ad):
-    """Clear bonded devices from the input Android device.
-
-    Args:
-        ad: the Android device performing the connection.
-    Returns:
-        True if clearing bonded devices was successful, false if unsuccessful.
-    """
-    bonded_device_list = ad.droid.bluetoothGetBondedDevices()
-    while bonded_device_list:
-        device_address = bonded_device_list[0]['address']
-        if not ad.droid.bluetoothUnbond(device_address):
-            log.error("Failed to unbond {} from {}".format(
-                device_address, ad.serial))
-            return False
-        log.info("Successfully unbonded {} from {}".format(
-            device_address, ad.serial))
-        #TODO: wait for BOND_STATE_CHANGED intent instead of waiting
-        time.sleep(1)
-
-        # If the device was first connected using the LE transport, after
-        # bonding it is accessible through both its LE address and its classic
-        # address. Unbonding it will unbond two devices representing different
-        # "addresses". Attempting to unbond such already-unbonded devices will
-        # result in bluetoothUnbond returning false.
-        bonded_device_list = ad.droid.bluetoothGetBondedDevices()
-    return True
-
-
-def connect_phone_to_headset(android,
-                             headset,
-                             timeout=bt_default_timeout,
-                             connection_check_period=10):
-    """Connects android phone to bluetooth headset.
-    Headset object must have methods power_on and enter_pairing_mode,
-    and attribute mac_address.
-
-    Args:
-        android: AndroidDevice object with SL4A installed.
-        headset: Object with attribute mac_address and methods power_on and
-            enter_pairing_mode.
-        timeout: Seconds to wait for devices to connect.
-        connection_check_period: how often to check for connection once the
-            SL4A connect RPC has been sent.
-    Returns:
-        connected (bool): True if devices are paired and connected by end of
-        method. False otherwise.
-    """
-    headset_mac_address = headset.mac_address
-    connected = android.droid.audioIsBluetoothA2dpOn()
-    log.info('Devices connected before pair attempt: %s' % connected)
-    if not connected:
-        # Turn on headset and initiate pairing mode.
-        headset.enter_pairing_mode()
-        android.droid.bluetoothStartPairingHelper()
-    start_time = time.time()
-    # If already connected, skip pair and connect attempt.
-    while not connected and (time.time() - start_time < timeout):
-        bonded_info = android.droid.bluetoothGetBondedDevices()
-        connected_info = android.droid.bluetoothGetConnectedDevices()
-        if headset.mac_address not in [info["address"] for info in bonded_info]:
-            # Use SL4A to pair and connect with headset.
-            headset.enter_pairing_mode()
-            android.droid.bluetoothDiscoverAndBond(headset_mac_address)
-        elif headset.mac_address not in [
-                info["address"] for info in connected_info
-        ]:
-            #Device is bonded but not connected
-            android.droid.bluetoothConnectBonded(headset_mac_address)
-        else:
-            #Headset is connected, but A2DP profile is not
-            android.droid.bluetoothA2dpConnect(headset_mac_address)
-        log.info('Waiting for connection...')
-        time.sleep(connection_check_period)
-        # Check for connection.
-        connected = android.droid.audioIsBluetoothA2dpOn()
-    log.info('Devices connected after pair attempt: %s' % connected)
-    return connected
-
-
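connect_phone_to_headset only relies on a small duck-typed headset interface (a mac_address attribute plus power_on and enter_pairing_mode methods). A minimal adapter satisfying that contract might look like the sketch below; the class, the prompts, and the example address are hypothetical, and `android` is assumed to be an SL4A-enabled AndroidDevice as in the function above.

    class ManualHeadset:
        """Minimal headset adapter that prompts an operator instead of
        driving real hardware."""

        def __init__(self, mac_address):
            self.mac_address = mac_address  # required attribute

        def power_on(self):  # required method
            input("Power on the headset, then press enter\n")

        def enter_pairing_mode(self):  # required method
            input("Put the headset in pairing mode, then press enter\n")

    headset = ManualHeadset("AA:BB:CC:DD:EE:FF")
    connected = connect_phone_to_headset(android, headset, timeout=60)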
-def connect_pri_to_sec(pri_ad, sec_ad, profiles_set, attempts=2):
-    """Connects pri droid to secondary droid.
-
-    Args:
-        pri_ad: AndroidDroid initiating connection
-        sec_ad: AndroidDroid accepting connection
-        profiles_set: Set of profiles to be connected
-        attempts: Number of attempts to try until failure.
-
-    Returns:
-        Pass if True
-        Fail if False
-    """
-    device_addr = sec_ad.droid.bluetoothGetLocalAddress()
-    # Allows extra time for the SDP records to be updated.
-    time.sleep(2)
-    curr_attempts = 0
-    while curr_attempts < attempts:
-        log.info("connect_pri_to_sec curr attempt {} total {}".format(
-            curr_attempts, attempts))
-        if _connect_pri_to_sec(pri_ad, sec_ad, profiles_set):
-            return True
-        curr_attempts += 1
-    log.error("connect_pri_to_sec failed to connect after {} attempts".format(
-        attempts))
-    return False
-
-
-def _connect_pri_to_sec(pri_ad, sec_ad, profiles_set):
-    """Connects pri droid to secondary droid.
-
-    Args:
-        pri_ad: AndroidDroid initiating connection.
-        sec_ad: AndroidDroid accepting connection.
-        profiles_set: Set of profiles to be connected.
-
-    Returns:
-        True if connection is successful, false if unsuccessful.
-    """
-    # Check if we support all profiles.
-    supported_profiles = bt_profile_constants.values()
-    for profile in profiles_set:
-        if profile not in supported_profiles:
-            pri_ad.log.info("Profile {} is not supported list {}".format(
-                profile, supported_profiles))
-            return False
-
-    # First check that devices are bonded.
-    paired = False
-    for paired_device in pri_ad.droid.bluetoothGetBondedDevices():
-        if paired_device['address'] == \
-                sec_ad.droid.bluetoothGetLocalAddress():
-            paired = True
-            break
-
-    if not paired:
-        pri_ad.log.error("Not paired to {}".format(sec_ad.serial))
-        return False
-
-    # Now try to connect them, the following call will try to initiate all
-    # connections.
-    pri_ad.droid.bluetoothConnectBonded(sec_ad.droid.bluetoothGetLocalAddress())
-
-    end_time = time.time() + 10
-    profile_connected = set()
-    sec_addr = sec_ad.droid.bluetoothGetLocalAddress()
-    pri_ad.log.info("Profiles to be connected {}".format(profiles_set))
-    # First use APIs to check profile connection state
-    while (time.time() < end_time and
-           not profile_connected.issuperset(profiles_set)):
-        if (bt_profile_constants['headset_client'] not in profile_connected and
-                bt_profile_constants['headset_client'] in profiles_set):
-            if is_hfp_client_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['headset_client'])
-        if (bt_profile_constants['a2dp'] not in profile_connected and
-                bt_profile_constants['a2dp'] in profiles_set):
-            if is_a2dp_src_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['a2dp'])
-        if (bt_profile_constants['a2dp_sink'] not in profile_connected and
-                bt_profile_constants['a2dp_sink'] in profiles_set):
-            if is_a2dp_snk_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['a2dp_sink'])
-        if (bt_profile_constants['map_mce'] not in profile_connected and
-                bt_profile_constants['map_mce'] in profiles_set):
-            if is_map_mce_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['map_mce'])
-        if (bt_profile_constants['map'] not in profile_connected and
-                bt_profile_constants['map'] in profiles_set):
-            if is_map_mse_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['map'])
-        time.sleep(0.1)
-    # If APIs fail, try to find the connection broadcast receiver.
-    while not profile_connected.issuperset(profiles_set):
-        try:
-            profile_event = pri_ad.ed.pop_event(
-                bluetooth_profile_connection_state_changed,
-                bt_default_timeout + 10)
-            pri_ad.log.info("Got event {}".format(profile_event))
-        except Exception:
-            pri_ad.log.error("Did not get {} profiles left {}".format(
-                bluetooth_profile_connection_state_changed, profile_connected))
-            return False
-
-        profile = profile_event['data']['profile']
-        state = profile_event['data']['state']
-        device_addr = profile_event['data']['addr']
-        if state == bt_profile_states['connected'] and \
-                device_addr == sec_ad.droid.bluetoothGetLocalAddress():
-            profile_connected.add(profile)
-        pri_ad.log.info(
-            "Profiles connected until now {}".format(profile_connected))
-    # Failure happens inside the while loop. If we came here then we already
-    # connected.
-    return True
-
-
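A hedged sketch of driving the profile-connection helpers from a test, assuming `pri_ad` and `sec_ad` are SL4A-enabled AndroidDevice objects that are already bonded to each other; the chosen profiles are examples only.

    from antlion.test_utils.bt.bt_constants import bt_profile_constants
    from antlion.test_utils.bt.bt_test_utils import (
        connect_pri_to_sec,
        disconnect_pri_from_sec,
    )

    # Profiles are passed as a set of bt_profile_constants values.
    profiles = {
        bt_profile_constants["headset_client"],
        bt_profile_constants["a2dp_sink"],
    }
    if connect_pri_to_sec(pri_ad, sec_ad, profiles, attempts=3):
        # ... exercise the connected profiles here ...
        disconnect_pri_from_sec(pri_ad, sec_ad, list(profiles))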
-def determine_max_advertisements(android_device):
-    """Determines programatically how many advertisements the Android device
-    supports.
-
-    Args:
-        android_device: The Android device to determine max advertisements of.
-
-    Returns:
-        The maximum advertisement count.
-    """
-    android_device.log.info(
-        "Determining number of maximum concurrent advertisements...")
-    advertisement_count = 0
-    bt_enabled = False
-    expected_bluetooth_on_event_name = bluetooth_on
-    if not android_device.droid.bluetoothCheckState():
-        android_device.droid.bluetoothToggleState(True)
-    try:
-        android_device.ed.pop_event(expected_bluetooth_on_event_name,
-                                    bt_default_timeout)
-    except Exception:
-        android_device.log.info(
-            "Failed to toggle Bluetooth on(no broadcast received).")
-        # Try one more time to poke at the actual state.
-        if android_device.droid.bluetoothCheckState() is True:
-            android_device.log.info(".. actual state is ON")
-        else:
-            android_device.log.error(
-                "Failed to turn Bluetooth on. Setting default advertisements to 1"
-            )
-            advertisement_count = -1
-            return advertisement_count
-    advertise_callback_list = []
-    advertise_data = android_device.droid.bleBuildAdvertiseData()
-    advertise_settings = android_device.droid.bleBuildAdvertiseSettings()
-    while (True):
-        advertise_callback = android_device.droid.bleGenBleAdvertiseCallback()
-        advertise_callback_list.append(advertise_callback)
-
-        android_device.droid.bleStartBleAdvertising(advertise_callback,
-                                                    advertise_data,
-                                                    advertise_settings)
-
-        regex = "(" + adv_succ.format(
-            advertise_callback) + "|" + adv_fail.format(
-                advertise_callback) + ")"
-        # wait for either success or failure event
-        evt = android_device.ed.pop_events(regex, bt_default_timeout,
-                                           small_timeout)
-        if evt[0]["name"] == adv_succ.format(advertise_callback):
-            advertisement_count += 1
-            android_device.log.info(
-                "Advertisement {} started.".format(advertisement_count))
-        else:
-            error = evt[0]["data"]["Error"]
-            if error == "ADVERTISE_FAILED_TOO_MANY_ADVERTISERS":
-                android_device.log.info(
-                    "Advertisement failed to start. Reached max " +
-                    "advertisements at {}".format(advertisement_count))
-                break
-            else:
-                raise BtTestUtilsError(
-                    "Expected ADVERTISE_FAILED_TOO_MANY_ADVERTISERS," +
-                    " but received bad error code {}".format(error))
-    try:
-        for adv in advertise_callback_list:
-            android_device.droid.bleStopBleAdvertising(adv)
-    except Exception:
-        android_device.log.error(
-            "Failed to stop advertisingment, resetting Bluetooth.")
-        reset_bluetooth([android_device])
-    return advertisement_count
-
-
-def disable_bluetooth(droid):
-    """Disable Bluetooth on input Droid object.
-
-    Args:
-        droid: The droid object to disable Bluetooth on.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    if droid.bluetoothCheckState() is True:
-        droid.bluetoothToggleState(False)
-        if droid.bluetoothCheckState() is True:
-            log.error("Failed to toggle Bluetooth off.")
-            return False
-    return True
-
-
-def disconnect_pri_from_sec(pri_ad, sec_ad, profiles_list):
-    """
-    Disconnect primary from secondary on a specific set of profiles
-    Args:
-        pri_ad - Primary android_device initiating disconnection
-        sec_ad - Secondary android droid (sl4a interface to keep the
-          method signature the same connect_pri_to_sec above)
-        profiles_list - List of profiles we want to disconnect from
-
-    Returns:
-        True on Success
-        False on Failure
-    """
-    # Sanity check to see if all the profiles in the given set is supported
-    supported_profiles = bt_profile_constants.values()
-    for profile in profiles_list:
-        if profile not in supported_profiles:
-            pri_ad.log.info("Profile {} is not in supported list {}".format(
-                profile, supported_profiles))
-            return False
-
-    pri_ad.log.info(pri_ad.droid.bluetoothGetBondedDevices())
-    # Disconnecting an already disconnected profile is a no-op,
-    # so we do not check the connection state first.
-    try:
-        pri_ad.droid.bluetoothDisconnectConnectedProfile(
-            sec_ad.droid.bluetoothGetLocalAddress(), profiles_list)
-    except Exception as err:
-        pri_ad.log.error(
-            "Exception while trying to disconnect profile(s) {}: {}".format(
-                profiles_list, err))
-        return False
-
-    profile_disconnected = set()
-    pri_ad.log.info("Disconnecting from profiles: {}".format(profiles_list))
-
-    while not profile_disconnected.issuperset(profiles_list):
-        try:
-            profile_event = pri_ad.ed.pop_event(
-                bluetooth_profile_connection_state_changed, bt_default_timeout)
-            pri_ad.log.info("Got event {}".format(profile_event))
-        except Exception as e:
-            pri_ad.log.error(
-                "Did not disconnect from Profiles. Reason {}".format(e))
-            return False
-
-        profile = profile_event['data']['profile']
-        state = profile_event['data']['state']
-        device_addr = profile_event['data']['addr']
-
-        if state == bt_profile_states['disconnected'] and \
-                device_addr == sec_ad.droid.bluetoothGetLocalAddress():
-            profile_disconnected.add(profile)
-        pri_ad.log.info(
-            "Profiles disconnected so far {}".format(profile_disconnected))
-
-    return True
-
-
-def enable_bluetooth(droid, ed):
-    if droid.bluetoothCheckState() is True:
-        return True
-
-    droid.bluetoothToggleState(True)
-    expected_bluetooth_on_event_name = bluetooth_on
-    try:
-        ed.pop_event(expected_bluetooth_on_event_name, bt_default_timeout)
-    except Exception:
-        log.info("Failed to toggle Bluetooth on (no broadcast received)")
-        if droid.bluetoothCheckState() is True:
-            log.info(".. actual state is ON")
-            return True
-        log.info(".. actual state is OFF")
-        return False
-
-    return True
-
-
-def factory_reset_bluetooth(android_devices):
-    """Clears Bluetooth stack of input Android device list.
-
-    Args:
-        android_devices: The Android device list to reset Bluetooth
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    for a in android_devices:
-        droid, ed = a.droid, a.ed
-        a.log.info("Reset state of bluetooth on device.")
-        if not bluetooth_enabled_check(a):
-            return False
-        # TODO: remove device unbond b/79418045
-        # Temporary solution to ensure all devices are unbonded
-        bonded_devices = droid.bluetoothGetBondedDevices()
-        for b in bonded_devices:
-            a.log.info("Removing bond for device {}".format(b['address']))
-            droid.bluetoothUnbond(b['address'])
-
-        droid.bluetoothFactoryReset()
-        wait_for_bluetooth_manager_state(droid)
-        if not enable_bluetooth(droid, ed):
-            return False
-    return True
-
-
-def generate_ble_advertise_objects(droid):
-    """Generate generic LE advertise objects.
-
-    Args:
-        droid: The droid object to generate advertise LE objects from.
-
-    Returns:
-        advertise_callback: The generated advertise callback id.
-        advertise_data: The generated advertise data id.
-        advertise_settings: The generated advertise settings id.
-    """
-    advertise_callback = droid.bleGenBleAdvertiseCallback()
-    advertise_data = droid.bleBuildAdvertiseData()
-    advertise_settings = droid.bleBuildAdvertiseSettings()
-    return advertise_callback, advertise_data, advertise_settings
-
-
-def generate_ble_scan_objects(droid):
-    """Generate generic LE scan objects.
-
-    Args:
-        droid: The droid object to generate LE scan objects from.
-
-    Returns:
-        filter_list: The generated scan filter list id.
-        scan_settings: The generated scan settings id.
-        scan_callback: The generated scan callback id.
-    """
-    filter_list = droid.bleGenFilterList()
-    scan_settings = droid.bleBuildScanSetting()
-    scan_callback = droid.bleGenScanCallback()
-    return filter_list, scan_settings, scan_callback
-
-
-def generate_id_by_size(size,
-                        chars=(string.ascii_lowercase + string.ascii_uppercase +
-                               string.digits)):
-    """Generate random ascii characters of input size and input char types
-
-    Args:
-        size: Input size of string.
-        chars: (Optional) Chars to use in generating a random string.
-
-    Returns:
-        String of random input chars at the input size.
-    """
-    return ''.join(random.choice(chars) for _ in range(size))
-
-
-def get_advanced_droid_list(android_devices):
-    """Add max_advertisement and batch_scan_supported attributes to input
-    Android devices
-
-    This will programmatically determine the maximum number of LE
-    advertisements supported by each input Android device.
-
-    Args:
-        android_devices: The Android devices to setup.
-
-    Returns:
-        List of Android devices with new attributes.
-    """
-    droid_list = []
-    for a in android_devices:
-        d, e = a.droid, a.ed
-        model = d.getBuildModel()
-        max_advertisements = 1
-        batch_scan_supported = True
-        if model in advertisements_to_devices.keys():
-            max_advertisements = advertisements_to_devices[model]
-        else:
-            max_advertisements = determine_max_advertisements(a)
-            max_tries = 3
-            # Retry to calculate max advertisements
-            while max_advertisements == -1 and max_tries > 0:
-                a.log.info(
-                    "Attempts left to determine max advertisements: {}".format(
-                        max_tries))
-                max_advertisements = determine_max_advertisements(a)
-                max_tries -= 1
-            advertisements_to_devices[model] = max_advertisements
-        if model in batch_scan_not_supported_list:
-            batch_scan_supported = False
-        role = {
-            'droid': d,
-            'ed': e,
-            'max_advertisements': max_advertisements,
-            'batch_scan_supported': batch_scan_supported
-        }
-        droid_list.append(role)
-    return droid_list
-
-
-def get_bluetooth_crash_count(android_device):
-    out = android_device.adb.shell("dumpsys bluetooth_manager")
-    return int(re.search("crashed(.*\d)", out).group(1))
-
-
-def read_otp(ad):
-    """Reads and parses the OTP output to return TX power backoff
-
-    Reads the OTP registers from the phone, parses them to return a
-    dict of TX power backoffs for different power levels
-
-    Args:
-        ad: android device object
-
-    Returns:
-        otp_dict: power backoff dict
-    """
-
-    ad.adb.shell('svc bluetooth disable')
-    time.sleep(2)
-    otp_output = ad.adb.shell('bluetooth_sar_test -r')
-    ad.adb.shell('svc bluetooth enable')
-    time.sleep(2)
-    otp_dict = {
-        "BR": {
-            "10": 0,
-            "9": 0,
-            "8": 0
-        },
-        "EDR": {
-            "10": 0,
-            "9": 0,
-            "8": 0
-        },
-        "BLE": {
-            "10": 0,
-            "9": 0,
-            "8": 0
-        }
-    }
-
-    otp_regex = '\s+\[\s+PL10:\s+(\d+)\s+PL9:\s+(\d+)*\s+PL8:\s+(\d+)\s+\]'
-
-    for key in otp_dict:
-        bank_list = re.findall("{}{}".format(key, otp_regex), otp_output)
-        for bank_tuple in bank_list:
-            if ('0', '0', '0') != bank_tuple:
-                [otp_dict[key]["10"], otp_dict[key]["9"],
-                 otp_dict[key]["8"]] = bank_tuple
-    return otp_dict
-
-
-def get_bt_metric(ad_list,
-                  duration=1,
-                  bqr_tag='Monitoring , Handle:',
-                  tag='',
-                  log_path=False):
-    """ Function to get the bt metric from logcat.
-
-    Captures logcat for the specified duration and returns the bqr results.
-    Takes list of android objects as input. If a single android object is given,
-    converts it into a list.
-
-    Args:
-        ad_list: list of android_device objects
-        duration: time duration (seconds) for which the logcat is parsed
-        bqr_tag: tag of bt metrics
-        tag: tag to be appended to the metrics raw data
-        log_path: path of metrics raw data
-
-    Returns:
-        process_data: dict of processed raw data for each android device
-    """
-
-    # Defining bqr quantities and their regex to extract
-    regex_dict = {
-        "pwlv": "PwLv:\s(\S+)",
-        "rssi": "RSSI:\s[-](\d+)",
-        "rssi_c0": "RSSI_C0:\s[-](\d+)",
-        "rssi_c1": "RSSI_C1:\s[-](\d+)",
-        "txpw_c0": "\sTxPw_C0:\s(-?\d+)",
-        "txpw_c1": "\sTxPw_C1:\s(-?\d+)",
-        "bftx": "BFTx:\s(\w+)",
-        "divtx": "DivTx:\s(\w+)"
-    }
-    metrics_dict = {
-        "rssi": {},
-        "pwlv": {},
-        "rssi_c0": {},
-        "rssi_c1": {},
-        "txpw_c0": {},
-        "txpw_c1": {},
-        "bftx": {},
-        "divtx": {}
-    }
-
-    # Converting a single android device object to list
-    if not isinstance(ad_list, list):
-        ad_list = [ad_list]
-
-    # Time sync with the test machine
-    for ad in ad_list:
-        ad.droid.setTime(int(round(time.time() * 1000)))
-        time.sleep(0.5)
-
-    begin_time = utils.get_current_epoch_time()
-    time.sleep(duration)
-    end_time = utils.get_current_epoch_time()
-
-    for ad in ad_list:
-        bt_rssi_log = ad.cat_adb_log(tag + "_bt_metric", begin_time, end_time)
-
-        # Extracting supporting bqr quantities
-        for metric, regex in regex_dict.items():
-            bqr_metric = []
-            file_bt_log = open(bt_rssi_log, "r")
-            for line in file_bt_log:
-                if bqr_tag in line:
-                    if re.findall(regex, line):
-                        m = re.findall(regex, line)[0].strip(",")
-                        bqr_metric.append(m)
-            metrics_dict[metric][ad.serial] = bqr_metric
-            file_bt_log.close()
-
-        # Formatting and saving the raw data
-        metrics_to_be_formatted = [{
-            "name": "rssi",
-            "averagble": "y"
-        }, {
-            "name": "rssi_c0",
-            "averagble": "y"
-        }, {
-            "name": "rssi_c1",
-            "averagble": "y"
-        }, {
-            "name": "pwlv",
-            "averagble": "n"
-        }, {
-            "name": "txpw_c0",
-            "averagble": "n"
-        }, {
-            "name": "txpw_c1",
-            "averagble": "n"
-        }, {
-            "name": "bftx",
-            "averagble": "n"
-        }, {
-            "name": "divtx",
-            "averagble": "n"
-        }]
-        for metric in metrics_to_be_formatted:
-            if metric["averagble"] == "y":
-                metrics_dict[metric["name"]][ad.serial] = [
-                    (-1) * int(x)
-                    for x in metrics_dict[metric["name"]][ad.serial]
-                ]
-            else:
-                metrics_dict[metric["name"]][ad.serial] = [
-                    int(x, 16) if '0x' in x else int(x, 10)
-                    for x in metrics_dict[metric["name"]][ad.serial]
-                ]
-        # Saving metrics raw data for each attenuation
-        if log_path:
-            output_file_name = ad.serial + "_metrics_raw_data_" + tag + ".csv"
-            output_file = os.path.join(log_path, output_file_name)
-            os.makedirs(log_path, exist_ok=True)
-            df_save_metrics = {}
-            for item in metrics_dict.items():
-                df_save_metrics[item[0]] = next(iter(item[1].items()))[1]
-            MetricsDict_df = pd.DataFrame({key:pd.Series(value) for key, value in df_save_metrics.items()})
-            MetricsDict_df.to_csv(output_file)
-        # Defining the process_data_dict
-        process_data = {
-            "rssi": {},
-            "pwlv": {},
-            "rssi_c0": {},
-            "rssi_c1": {},
-            "txpw_c0": {},
-            "txpw_c1": {},
-            "bftx": {},
-            "divtx": {}
-        }
-
-        # Computing the processed data to return
-        for metric in metrics_to_be_formatted:
-            if metric["averagble"] == "y":
-                process_data[metric["name"]][ad.serial] = [
-                    x for x in metrics_dict[metric["name"]][ad.serial]
-                    if x != 0 and x != -127
-                ]
-
-                try:
-                    # Compute the average of the metric
-                    process_data[metric["name"]][ad.serial] = round(
-                        sum(metrics_dict[metric["name"]][ad.serial]) /
-                        len(metrics_dict[metric["name"]][ad.serial]), 2)
-                except ZeroDivisionError:
-                    # No valid samples; set value to 'n/a'
-                    process_data[metric["name"]][ad.serial] = "n/a"
-            else:
-                try:
-                    # Take the most common value
-                    process_data[metric["name"]][ad.serial] = max(
-                        metrics_dict[metric["name"]][ad.serial],
-                        key=metrics_dict[metric["name"]][ad.serial].count)
-                except ValueError:
-                    # No samples collected; set value to 'n/a'
-                    process_data[metric["name"]][ad.serial] = "n/a"
-
-    return process_data
-
-
-def get_bt_rssi(ad, duration=1, processed=True, tag='', log_path=False):
-    """Function to get average bt rssi from logcat.
-
-    This function returns the average RSSI for the given duration. RSSI values are
-    extracted from BQR.
-
-    Args:
-        ad: (list of) android_device object.
-        duration: time duration(seconds) for which logcat is parsed.
-
-    Returns:
-        avg_rssi: average RSSI on each android device for the given duration.
-    """
-    bqr_results = get_bt_metric(ad, duration, tag=tag, log_path=log_path)
-    return bqr_results["rssi"]
-
-
-def enable_bqr(
-    ad_list,
-    bqr_interval=10,
-    bqr_event_mask=15,
-):
-    """Sets up BQR reporting.
-
-    Sets up BQR to report BT metrics at the requested frequency and toggles
-    airplane mode for the bqr settings to take effect.
-
-    Args:
-        ad_list: an android_device or list of android devices.
-    """
-    # Converting a single android device object to list
-    if not isinstance(ad_list, list):
-        ad_list = [ad_list]
-
-    for ad in ad_list:
-        #Setting BQR parameters
-        ad.adb.shell("setprop persist.bluetooth.bqr.event_mask {}".format(
-            bqr_event_mask))
-        ad.adb.shell("setprop persist.bluetooth.bqr.min_interval_ms {}".format(
-            bqr_interval))
-
-        ## Toggle airplane mode
-        ad.droid.connectivityToggleAirplaneMode(True)
-        ad.droid.connectivityToggleAirplaneMode(False)
-
-
-def disable_bqr(ad_list):
-    """Disables BQR reporting.
-
-    Args:
-        ad_list: an android_device or list of android devices.
-    """
-    # Converting a single android device object to list
-    if not isinstance(ad_list, list):
-        ad_list = [ad_list]
-
-    DISABLE_BQR_MASK = 0
-
-    for ad in ad_list:
-        #Disabling BQR
-        ad.adb.shell("setprop persist.bluetooth.bqr.event_mask {}".format(
-            DISABLE_BQR_MASK))
-
-        ## Toggle airplane mode
-        ad.droid.connectivityToggleAirplaneMode(True)
-        ad.droid.connectivityToggleAirplaneMode(False)
-
-
-def get_device_selector_dictionary(android_device_list):
-    """Create a dictionary of Bluetooth features vs Android devices.
-
-    Args:
-        android_device_list: The list of Android devices.
-    Returns:
-        A dictionary of profiles/features to Android devices.
-    """
-    selector_dict = {}
-    for ad in android_device_list:
-        uuids = ad.droid.bluetoothGetLocalUuids()
-
-        for profile, uuid_const in sig_uuid_constants.items():
-            uuid_check = sig_uuid_constants['BASE_UUID'].format(
-                uuid_const).lower()
-            if uuids and uuid_check in uuids:
-                if profile in selector_dict:
-                    selector_dict[profile].append(ad)
-                else:
-                    selector_dict[profile] = [ad]
-
-        # Various services may not be active during BT startup.
-        # If the device can be identified through adb shell pm list features
-        # then try to add them to the appropriate profiles / features.
-
-        # Android TV.
-        if "feature:android.hardware.type.television" in ad.features:
-            ad.log.info("Android TV device found.")
-            supported_profiles = ['AudioSink']
-            _add_android_device_to_dictionary(ad, supported_profiles,
-                                              selector_dict)
-
-        # Android Auto
-        elif "feature:android.hardware.type.automotive" in ad.features:
-            ad.log.info("Android Auto device found.")
-            # Add: AudioSink , A/V_RemoteControl,
-            supported_profiles = [
-                'AudioSink', 'A/V_RemoteControl', 'Message Notification Server'
-            ]
-            _add_android_device_to_dictionary(ad, supported_profiles,
-                                              selector_dict)
-        # Android Wear
-        elif "feature:android.hardware.type.watch" in ad.features:
-            ad.log.info("Android Wear device found.")
-            supported_profiles = []
-            _add_android_device_to_dictionary(ad, supported_profiles,
-                                              selector_dict)
-        # Android Phone
-        elif "feature:android.hardware.telephony" in ad.features:
-            ad.log.info("Android Phone device found.")
-            # Add: AudioSink
-            supported_profiles = [
-                'AudioSource', 'A/V_RemoteControlTarget',
-                'Message Access Server'
-            ]
-            _add_android_device_to_dictionary(ad, supported_profiles,
-                                              selector_dict)
-    return selector_dict
-
-
-def get_mac_address_of_generic_advertisement(scan_ad, adv_ad):
-    """Start generic advertisement and get it's mac address by LE scanning.
-
-    Args:
-        scan_ad: The Android device to use as the scanner.
-        adv_ad: The Android device to use as the advertiser.
-
-    Returns:
-        mac_address: The mac address of the advertisement.
-        advertise_callback: The active advertisement's callback id.
-        scan_callback: The scan callback id that found the advertisement.
-    """
-    adv_ad.droid.bleSetAdvertiseDataIncludeDeviceName(True)
-    adv_ad.droid.bleSetAdvertiseSettingsAdvertiseMode(
-        ble_advertise_settings_modes['low_latency'])
-    adv_ad.droid.bleSetAdvertiseSettingsIsConnectable(True)
-    adv_ad.droid.bleSetAdvertiseSettingsTxPowerLevel(
-        ble_advertise_settings_tx_powers['high'])
-    advertise_callback, advertise_data, advertise_settings = (
-        generate_ble_advertise_objects(adv_ad.droid))
-    adv_ad.droid.bleStartBleAdvertising(advertise_callback, advertise_data,
-                                        advertise_settings)
-    try:
-        adv_ad.ed.pop_event(adv_succ.format(advertise_callback),
-                            bt_default_timeout)
-    except Empty as err:
-        raise BtTestUtilsError(
-            "Advertiser did not start successfully {}".format(err))
-    filter_list = scan_ad.droid.bleGenFilterList()
-    scan_settings = scan_ad.droid.bleBuildScanSetting()
-    scan_callback = scan_ad.droid.bleGenScanCallback()
-    scan_ad.droid.bleSetScanFilterDeviceName(
-        adv_ad.droid.bluetoothGetLocalName())
-    scan_ad.droid.bleBuildScanFilter(filter_list)
-    scan_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
-    try:
-        event = scan_ad.ed.pop_event(
-            "BleScan{}onScanResults".format(scan_callback), bt_default_timeout)
-    except Empty as err:
-        raise BtTestUtilsError(
-            "Scanner did not find advertisement {}".format(err))
-    mac_address = event['data']['Result']['deviceInfo']['address']
-    return mac_address, advertise_callback, scan_callback
-
-
-def hid_device_send_key_data_report(host_id, device_ad, key, interval=1):
-    """Send a HID report simulating a 1-second keyboard press from host_ad to
-    device_ad
-
-    Args:
-        host_id: the Bluetooth MAC address or name of the HID host
-        device_ad: HID device
-        key: the key we want to send
-        interval: the interval between key press and key release
-    """
-    device_ad.droid.bluetoothHidDeviceSendReport(host_id, hid_id_keyboard,
-                                                 hid_keyboard_report(key))
-    time.sleep(interval)
-    device_ad.droid.bluetoothHidDeviceSendReport(host_id, hid_id_keyboard,
-                                                 hid_keyboard_report("00"))
-
-
-def hid_keyboard_report(key, modifier="00"):
-    """Get the HID keyboard report for the given key
-
-    Args:
-        key: the key we want
-        modifier: HID keyboard modifier bytes
-    Returns:
-        The byte array for the HID report.
-    """
-    return str(
-        bytearray.fromhex(" ".join(
-            [modifier, "00", key, "00", "00", "00", "00", "00"])), "utf-8")
-
-
-def is_a2dp_connected(sink, source):
-    """
-    Convenience function to check whether the two devices are connected
-    over A2DP.
-    Args:
-        sink:       Audio Sink
-        source:     Audio Source
-    Returns:
-        True if Connected
-        False if Not connected
-    """
-
-    devices = sink.droid.bluetoothA2dpSinkGetConnectedDevices()
-    for device in devices:
-        sink.log.info("A2dp Connected device {}".format(device["name"]))
-        if (device["address"] == source.droid.bluetoothGetLocalAddress()):
-            return True
-    return False
-
-
-def is_a2dp_snk_device_connected(ad, addr):
-    """Determines if an AndroidDevice has A2DP snk connectivity to input address
-
-    Args:
-        ad: the Android device
-        addr: the address that's expected
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    devices = ad.droid.bluetoothA2dpSinkGetConnectedDevices()
-    ad.log.info("Connected A2DP Sink devices: {}".format(devices))
-    if addr in {d['address'] for d in devices}:
-        return True
-    return False
-
-
-def is_a2dp_src_device_connected(ad, addr):
-    """Determines if an AndroidDevice has A2DP connectivity to input address
-
-    Args:
-        ad: the Android device
-        addr: the address that's expected
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    devices = ad.droid.bluetoothA2dpGetConnectedDevices()
-    ad.log.info("Connected A2DP Source devices: {}".format(devices))
-    if addr in {d['address'] for d in devices}:
-        return True
-    return False
-
-
-def is_hfp_client_device_connected(ad, addr):
-    """Determines if an AndroidDevice has HFP connectivity to input address
-
-    Args:
-        ad: the Android device
-        addr: the address that's expected
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    devices = ad.droid.bluetoothHfpClientGetConnectedDevices()
-    ad.log.info("Connected HFP Client devices: {}".format(devices))
-    if addr in {d['address'] for d in devices}:
-        return True
-    return False
-
-
-def is_map_mce_device_connected(ad, addr):
-    """Determines if an AndroidDevice has MAP MCE connectivity to input address
-
-    Args:
-        ad: the Android device
-        addr: the address that's expected
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    devices = ad.droid.bluetoothMapClientGetConnectedDevices()
-    ad.log.info("Connected MAP MCE devices: {}".format(devices))
-    if addr in {d['address'] for d in devices}:
-        return True
-    return False
-
-
-def is_map_mse_device_connected(ad, addr):
-    """Determines if an AndroidDevice has MAP MSE connectivity to input address
-
-    Args:
-        ad: the Android device
-        addr: the address that's expected
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    devices = ad.droid.bluetoothMapGetConnectedDevices()
-    ad.log.info("Connected MAP MSE devices: {}".format(devices))
-    if addr in {d['address'] for d in devices}:
-        return True
-    return False
-
-
-def kill_bluetooth_process(ad):
-    """Kill Bluetooth process on Android device.
-
-    Args:
-        ad: Android device to kill BT process on.
-    """
-    ad.log.info("Killing Bluetooth process.")
-    pid = ad.adb.shell(
-        "ps | grep com.android.bluetooth | awk '{print $2}'").decode('ascii')
-    call(["adb -s " + ad.serial + " shell kill " + pid], shell=True)
-
-
-def log_energy_info(android_devices, state):
-    """Logs energy info of input Android devices.
-
-    Args:
-        android_devices: input Android device list to log energy info from.
-        state: the input state to log. Usually 'Start' or 'Stop' for logging.
-
-    Returns:
-        A logging string of the Bluetooth energy info reported.
-    """
-    return_string = "{} Energy info collection:\n".format(state)
-    # Bug: b/31966929
-    return return_string
-
-
-def orchestrate_and_verify_pan_connection(pan_dut, panu_dut):
-    """Setups up a PAN conenction between two android devices.
-
-    Args:
-        pan_dut: the Android device providing tethering services
-        panu_dut: the Android device using the internet connection from the
-            pan_dut
-    Returns:
-        True if PAN connection and verification is successful,
-        false if unsuccessful.
-    """
-    pan_dut.droid.bluetoothStartConnectionStateChangeMonitor("")
-    panu_dut.droid.bluetoothStartConnectionStateChangeMonitor("")
-    if not bluetooth_enabled_check(panu_dut):
-        return False
-    if not bluetooth_enabled_check(pan_dut):
-        return False
-    pan_dut.droid.bluetoothPanSetBluetoothTethering(True)
-    if not (pair_pri_to_sec(pan_dut, panu_dut)):
-        return False
-    if not pan_dut.droid.bluetoothPanIsTetheringOn():
-        pan_dut.log.error("Failed to enable Bluetooth tethering.")
-        return False
-    # Magic sleep needed to give the stack time in between bonding and
-    # connecting the PAN profile.
-    time.sleep(pan_connect_timeout)
-    panu_dut.droid.bluetoothConnectBonded(
-        pan_dut.droid.bluetoothGetLocalAddress())
-    return True
-
-
-def orchestrate_bluetooth_socket_connection(
-        client_ad,
-        server_ad,
-        accept_timeout_ms=default_bluetooth_socket_timeout_ms,
-        uuid=None):
-    """Sets up the Bluetooth Socket connection between two Android devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    server_ad.droid.bluetoothStartPairingHelper()
-    client_ad.droid.bluetoothStartPairingHelper()
-
-    server_ad.droid.bluetoothSocketConnBeginAcceptThreadUuid(
-        (bluetooth_socket_conn_test_uuid if uuid is None else uuid),
-        accept_timeout_ms)
-    client_ad.droid.bluetoothSocketConnBeginConnectThreadUuid(
-        server_ad.droid.bluetoothGetLocalAddress(),
-        (bluetooth_socket_conn_test_uuid if uuid is None else uuid))
-
-    end_time = time.time() + bt_default_timeout
-    result = False
-    test_result = True
-    while time.time() < end_time:
-        if len(client_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-            test_result = True
-            client_ad.log.info("Bluetooth socket Client Connection Active")
-            break
-        else:
-            test_result = False
-        time.sleep(1)
-    if not test_result:
-        client_ad.log.error("Failed to establish a Bluetooth socket connection")
-        return False
-    return True
-
-
-def orchestrate_rfcomm_connection(client_ad,
-                                  server_ad,
-                                  accept_timeout_ms=default_rfcomm_timeout_ms,
-                                  uuid=None):
-    """Sets up the RFCOMM connection between two Android devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-    Returns:
-        True if connection was successful, false if unsuccessful.
-    """
-    result = orchestrate_bluetooth_socket_connection(
-        client_ad, server_ad, accept_timeout_ms,
-        (bt_rfcomm_uuids['default_uuid'] if uuid is None else uuid))
-
-    return result
-
-
-def pair_pri_to_sec(pri_ad, sec_ad, attempts=2, auto_confirm=True):
-    """Pairs pri droid to secondary droid.
-
-    Args:
-        pri_ad: Android device initiating connection
-        sec_ad: Android device accepting connection
-        attempts: Number of attempts to try until failure.
-        auto_confirm: Auto confirm passkey match for both devices
-
-    Returns:
-        True if pairing succeeded,
-        False otherwise.
-    """
-    pri_ad.droid.bluetoothStartConnectionStateChangeMonitor(
-        sec_ad.droid.bluetoothGetLocalAddress())
-    curr_attempts = 0
-    while curr_attempts < attempts:
-        if _pair_pri_to_sec(pri_ad, sec_ad, auto_confirm):
-            return True
-        # Wait 2 seconds before unbonding
-        time.sleep(2)
-        if not clear_bonded_devices(pri_ad):
-            log.error(
-                "Failed to clear bond for primary device at attempt {}".format(
-                    str(curr_attempts)))
-            return False
-        if not clear_bonded_devices(sec_ad):
-            log.error("Failed to clear bond for secondary device at attempt {}".
-                      format(str(curr_attempts)))
-            return False
-        # Wait 2 seconds after unbonding
-        time.sleep(2)
-        curr_attempts += 1
-    log.error("pair_pri_to_sec failed to connect after {} attempts".format(
-        str(attempts)))
-    return False
-
-
-def _pair_pri_to_sec(pri_ad, sec_ad, auto_confirm):
-    # Enable discovery on sec_ad so that pri_ad can find it.
-    # The timeout here is based on how much time it would take for two devices
-    # to pair with each other once pri_ad starts seeing devices.
-    pri_droid = pri_ad.droid
-    sec_droid = sec_ad.droid
-    pri_ad.ed.clear_all_events()
-    sec_ad.ed.clear_all_events()
-    log.info("Bonding device {} to {}".format(
-        pri_droid.bluetoothGetLocalAddress(),
-        sec_droid.bluetoothGetLocalAddress()))
-    sec_droid.bluetoothMakeDiscoverable(bt_default_timeout)
-    target_address = sec_droid.bluetoothGetLocalAddress()
-    log.debug("Starting paring helper on each device")
-    pri_droid.bluetoothStartPairingHelper(auto_confirm)
-    sec_droid.bluetoothStartPairingHelper(auto_confirm)
-    pri_ad.log.info("Primary device starting discovery and executing bond")
-    result = pri_droid.bluetoothDiscoverAndBond(target_address)
-    if not auto_confirm:
-        if not _wait_for_passkey_match(pri_ad, sec_ad):
-            return False
-    # Loop until we have bonded successfully or timeout.
-    end_time = time.time() + bt_default_timeout
-    pri_ad.log.info("Verifying devices are bonded")
-    while time.time() < end_time:
-        bonded_devices = pri_droid.bluetoothGetBondedDevices()
-        bonded = False
-        for d in bonded_devices:
-            if d['address'] == target_address:
-                pri_ad.log.info("Successfully bonded to device")
-                return True
-        time.sleep(0.1)
-    # Timed out trying to bond.
-    pri_ad.log.info("Failed to bond devices.")
-    return False
-
-
-def reset_bluetooth(android_devices):
-    """Resets Bluetooth state of input Android device list.
-
-    Args:
-        android_devices: The Android device list to reset Bluetooth state on.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    for a in android_devices:
-        droid, ed = a.droid, a.ed
-        a.log.info("Reset state of bluetooth on device.")
-        if droid.bluetoothCheckState() is True:
-            droid.bluetoothToggleState(False)
-            expected_bluetooth_off_event_name = bluetooth_off
-            try:
-                ed.pop_event(expected_bluetooth_off_event_name,
-                             bt_default_timeout)
-            except Exception:
-                a.log.error("Failed to toggle Bluetooth off.")
-                return False
-        # temp sleep for b/17723234
-        time.sleep(3)
-        if not bluetooth_enabled_check(a):
-            return False
-    return True
-
-
-def scan_and_verify_n_advertisements(scn_ad, max_advertisements):
-    """Verify that input number of advertisements can be found from the scanning
-    Android device.
-
-    Args:
-        scn_ad: The Android device to start LE scanning on.
-        max_advertisements: The number of advertisements the scanner expects to
-        find.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    test_result = False
-    address_list = []
-    filter_list = scn_ad.droid.bleGenFilterList()
-    scn_ad.droid.bleBuildScanFilter(filter_list)
-    scan_settings = scn_ad.droid.bleBuildScanSetting()
-    scan_callback = scn_ad.droid.bleGenScanCallback()
-    scn_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
-    start_time = time.time()
-    while (start_time + bt_default_timeout) > time.time():
-        event = None
-        try:
-            event = scn_ad.ed.pop_event(scan_result.format(scan_callback),
-                                        bt_default_timeout)
-        except Empty as error:
-            raise BtTestUtilsError(
-                "Failed to find scan event: {}".format(error))
-        address = event['data']['Result']['deviceInfo']['address']
-        if address not in address_list:
-            address_list.append(address)
-        if len(address_list) == max_advertisements:
-            test_result = True
-            break
-    scn_ad.droid.bleStopBleScan(scan_callback)
-    return test_result
-
-
-def set_bluetooth_codec(android_device,
-                        codec_type,
-                        sample_rate,
-                        bits_per_sample,
-                        channel_mode,
-                        codec_specific_1=0):
-    """Sets the A2DP codec configuration on the AndroidDevice.
-
-    Args:
-        android_device (antlion.controllers.android_device.AndroidDevice): the
-            android device for which to switch the codec.
-        codec_type (str): the desired codec type. Must be a key in
-            bt_constants.codec_types.
-        sample_rate (str): the desired sample rate. Must be a key in
-            bt_constants.sample_rates.
-        bits_per_sample (str): the desired bits per sample. Must be a key in
-            bt_constants.bits_per_samples.
-        channel_mode (str): the desired channel mode. Must be a key in
-            bt_constants.channel_modes.
-        codec_specific_1 (int): the desired bit rate (quality) for LDAC codec.
-    Returns:
-        bool: True if the codec config was successfully changed to the desired
-            values. Else False.
-    """
-    message = ("Set Android Device A2DP Bluetooth codec configuration:\n"
-               "\tCodec: {codec_type}\n"
-               "\tSample Rate: {sample_rate}\n"
-               "\tBits per Sample: {bits_per_sample}\n"
-               "\tChannel Mode: {channel_mode}".format(
-                   codec_type=codec_type,
-                   sample_rate=sample_rate,
-                   bits_per_sample=bits_per_sample,
-                   channel_mode=channel_mode))
-    android_device.log.info(message)
-
-    # Send SL4A command
-    droid, ed = android_device.droid, android_device.ed
-    if not droid.bluetoothA2dpSetCodecConfigPreference(
-            codec_types[codec_type], sample_rates[str(sample_rate)],
-            bits_per_samples[str(bits_per_sample)], channel_modes[channel_mode],
-            codec_specific_1):
-        android_device.log.warning("SL4A command returned False. Codec was not "
-                                   "changed.")
-    else:
-        try:
-            ed.pop_event(bluetooth_a2dp_codec_config_changed,
-                         bt_default_timeout)
-        except Exception:
-            android_device.log.warning("SL4A event not registered. Codec "
-                                       "may not have been changed.")
-
-    # Validate codec value through ADB
-    # TODO (aidanhb): validate codec more robustly using SL4A
-    command = "dumpsys bluetooth_manager | grep -i 'current codec'"
-    out = android_device.adb.shell(command)
-    split_out = out.split(": ")
-    if len(split_out) != 2:
-        android_device.log.warning("Could not verify codec config change "
-                                   "through ADB.")
-    elif split_out[1].strip().upper() != codec_type:
-        android_device.log.error("Codec config was not changed.\n"
-                                 "\tExpected codec: {exp}\n"
-                                 "\tActual codec: {act}".format(
-                                     exp=codec_type, act=split_out[1].strip()))
-        return False
-    android_device.log.info("Bluetooth codec successfully changed.")
-    return True
-
-
-def set_bt_scan_mode(ad, scan_mode_value):
-    """Set Android device's Bluetooth scan mode.
-
-    Args:
-        ad: The Android device to set the scan mode on.
-        scan_mode_value: The value to set the scan mode to.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    droid, ed = ad.droid, ad.ed
-    if scan_mode_value == bt_scan_mode_types['state_off']:
-        disable_bluetooth(droid)
-        scan_mode = droid.bluetoothGetScanMode()
-        reset_bluetooth([ad])
-        if scan_mode != scan_mode_value:
-            return False
-    elif scan_mode_value == bt_scan_mode_types['none']:
-        droid.bluetoothMakeUndiscoverable()
-        scan_mode = droid.bluetoothGetScanMode()
-        if scan_mode != scan_mode_value:
-            return False
-    elif scan_mode_value == bt_scan_mode_types['connectable']:
-        droid.bluetoothMakeUndiscoverable()
-        droid.bluetoothMakeConnectable()
-        scan_mode = droid.bluetoothGetScanMode()
-        if scan_mode != scan_mode_value:
-            return False
-    elif (scan_mode_value == bt_scan_mode_types['connectable_discoverable']):
-        droid.bluetoothMakeDiscoverable()
-        scan_mode = droid.bluetoothGetScanMode()
-        if scan_mode != scan_mode_value:
-            return False
-    else:
-        # invalid scan mode
-        return False
-    return True
-
-
-def set_device_name(droid, name):
-    """Set and check Bluetooth local name on input droid object.
-
-    Args:
-        droid: Droid object to set local name on.
-        name: the Bluetooth local name to set.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    droid.bluetoothSetLocalName(name)
-    time.sleep(2)
-    droid_name = droid.bluetoothGetLocalName()
-    if droid_name != name:
-        return False
-    return True
-
-
-def set_profile_priority(host_ad, client_ad, profiles, priority):
-    """Sets the priority of said profile(s) on host_ad for client_ad"""
-    for profile in profiles:
-        host_ad.log.info("Profile {} on {} for {} set to priority {}".format(
-            profile, host_ad.droid.bluetoothGetLocalName(),
-            client_ad.droid.bluetoothGetLocalAddress(), priority.value))
-        if bt_profile_constants['a2dp_sink'] == profile:
-            host_ad.droid.bluetoothA2dpSinkSetPriority(
-                client_ad.droid.bluetoothGetLocalAddress(), priority.value)
-        elif bt_profile_constants['headset_client'] == profile:
-            host_ad.droid.bluetoothHfpClientSetPriority(
-                client_ad.droid.bluetoothGetLocalAddress(), priority.value)
-        elif bt_profile_constants['pbap_client'] == profile:
-            host_ad.droid.bluetoothPbapClientSetPriority(
-                client_ad.droid.bluetoothGetLocalAddress(), priority.value)
-        else:
-            host_ad.log.error(
-                "Profile {} not yet supported for priority settings".format(
-                    profile))
-
-
-def setup_multiple_devices_for_bt_test(android_devices):
-    """A common setup routine for Bluetooth on input Android device list.
-
-    Things this function sets up:
-    1. Resets Bluetooth.
-    2. Sets the Bluetooth local name to a random string of size 4.
-    3. Disables BLE background scanning.
-    4. Enables Bluetooth snoop logging.
-
-    Args:
-        android_devices: Android device list to setup Bluetooth on.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    log.info("Setting up Android Devices")
-    # TODO: Temporary fix for an SELinux error.
-    for ad in android_devices:
-        ad.adb.shell("setenforce 0")
-    threads = []
-    try:
-        for a in android_devices:
-            thread = threading.Thread(target=factory_reset_bluetooth,
-                                      args=([[a]]))
-            threads.append(thread)
-            thread.start()
-        for t in threads:
-            t.join()
-
-        for a in android_devices:
-            d = a.droid
-            # TODO: Create specific RPC command to instantiate
-            # BluetoothConnectionFacade. This is just a workaround.
-            d.bluetoothStartConnectionStateChangeMonitor("")
-            setup_result = d.bluetoothSetLocalName(generate_id_by_size(4))
-            if not setup_result:
-                a.log.error("Failed to set device name.")
-                return setup_result
-            d.bluetoothDisableBLE()
-            utils.set_location_service(a, True)
-            bonded_devices = d.bluetoothGetBondedDevices()
-            for b in bonded_devices:
-                a.log.info("Removing bond for device {}".format(b['address']))
-                d.bluetoothUnbond(b['address'])
-        for a in android_devices:
-            a.adb.shell("setprop persist.bluetooth.btsnooplogmode full")
-            getprop_result = a.adb.shell(
-                "getprop persist.bluetooth.btsnooplogmode") == "full"
-            if not getprop_result:
-                a.log.warning("Failed to enable Bluetooth Hci Snoop Logging.")
-    except Exception as err:
-        log.error("Something went wrong in multi device setup: {}".format(err))
-        return False
-    return setup_result
-
-
-def setup_n_advertisements(adv_ad, num_advertisements):
-    """Setup input number of advertisements on input Android device.
-
-    Args:
-        adv_ad: The Android device to start LE advertisements on.
-        num_advertisements: The number of advertisements to start.
-
-    Returns:
-        advertise_callback_list: List of advertisement callback ids.
-    """
-    adv_ad.droid.bleSetAdvertiseSettingsAdvertiseMode(
-        ble_advertise_settings_modes['low_latency'])
-    advertise_data = adv_ad.droid.bleBuildAdvertiseData()
-    advertise_settings = adv_ad.droid.bleBuildAdvertiseSettings()
-    advertise_callback_list = []
-    for i in range(num_advertisements):
-        advertise_callback = adv_ad.droid.bleGenBleAdvertiseCallback()
-        advertise_callback_list.append(advertise_callback)
-        adv_ad.droid.bleStartBleAdvertising(advertise_callback, advertise_data,
-                                            advertise_settings)
-        try:
-            adv_ad.ed.pop_event(adv_succ.format(advertise_callback),
-                                bt_default_timeout)
-            adv_ad.log.info("Advertisement {} started.".format(i + 1))
-        except Empty as error:
-            adv_ad.log.error("Advertisement {} failed to start.".format(i + 1))
-            raise BtTestUtilsError(
-                "Test failed with Empty error: {}".format(error))
-    return advertise_callback_list
-
-
-def take_btsnoop_log(ad, testcase, testname):
-    """Grabs the btsnoop_hci log on a device and stores it in the log directory
-    of the test class.
-
-    If you want to grab the btsnoop_hci log, call this function with
-    android_device objects in on_fail. Pulling the log takes a relatively long
-    time, so use this cautiously.
-
-    Args:
-        ad: The android_device instance to pull the snoop log from.
-        testcase: Name of the test class that triggered this snoop log.
-        testname: Name of the test case that triggered this snoop log.
-    """
-    testname = "".join(x for x in testname if x.isalnum())
-    serial = ad.serial
-    device_model = ad.droid.getBuildModel()
-    device_model = device_model.replace(" ", "")
-    out_name = ','.join((testname, device_model, serial))
-    snoop_path = os.path.join(ad.device_log_path, 'BluetoothSnoopLogs')
-    os.makedirs(snoop_path, exist_ok=True)
-    cmd = ''.join(("adb -s ", serial, " pull ", btsnoop_log_path_on_device, " ",
-                   snoop_path + '/' + out_name, ".btsnoop_hci.log"))
-    exe_cmd(cmd)
-    try:
-        cmd = ''.join(
-            ("adb -s ", serial, " pull ", btsnoop_last_log_path_on_device, " ",
-             snoop_path + '/' + out_name, ".btsnoop_hci.log.last"))
-        exe_cmd(cmd)
-    except Exception as err:
-        testcase.log.info(
-            "File does not exist {}".format(btsnoop_last_log_path_on_device))
-
-
-def take_btsnoop_logs(android_devices, testcase, testname):
-    """Pull btsnoop logs from an input list of android devices.
-
-    Args:
-        android_devices: the list of Android devices to pull btsnoop logs from.
-        testcase: Name of the test class that triggered this snoop log.
-        testname: Name of the test case that triggered this snoop log.
-    """
-    for a in android_devices:
-        take_btsnoop_log(a, testcase, testname)
-
-
-def teardown_n_advertisements(adv_ad, num_advertisements,
-                              advertise_callback_list):
-    """Stop input number of advertisements on input Android device.
-
-    Args:
-        adv_ad: The Android device to stop LE advertisements on.
-        num_advertisements: The number of advertisements to stop.
-        advertise_callback_list: The list of advertisement callbacks to stop.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    for n in range(num_advertisements):
-        adv_ad.droid.bleStopBleAdvertising(advertise_callback_list[n])
-    return True
-
-
-def verify_server_and_client_connected(client_ad, server_ad, log=True):
-    """Verify that input server and client Android devices are connected.
-
-    This code is under the assumption that there will only be
-    a single connection.
-
-    Args:
-        client_ad: the Android device to check number of active connections.
-        server_ad: the Android device to check number of active connections.
-
-    Returns:
-        True if both server and client have at least 1 active connection,
-        false if unsuccessful.
-    """
-    test_result = True
-    if len(server_ad.droid.bluetoothSocketConnActiveConnections()) == 0:
-        if log:
-            server_ad.log.error("No socket connections found on server.")
-        test_result = False
-    if len(client_ad.droid.bluetoothSocketConnActiveConnections()) == 0:
-        if log:
-            client_ad.log.error("No socket connections found on client.")
-        test_result = False
-    return test_result
-
-
-def wait_for_bluetooth_manager_state(droid,
-                                     state=None,
-                                     timeout=10,
-                                     threshold=5):
-    """ Waits for BlueTooth normalized state or normalized explicit state
-    args:
-        droid: droid device object
-        state: expected BlueTooth state
-        timeout: max timeout threshold
-        threshold: list len of bt state
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    all_states = []
-    get_state = lambda: droid.bluetoothGetLeState()
-    start_time = time.time()
-    while time.time() < start_time + timeout:
-        all_states.append(get_state())
-        if len(all_states) >= threshold:
-            # for any normalized state
-            if state is None:
-                if len(set(all_states[-threshold:])) == 1:
-                    log.info("State normalized {}".format(
-                        set(all_states[-threshold:])))
-                    return True
-            else:
-                # explicit check against normalized state
-                if set([state]).issubset(all_states[-threshold:]):
-                    return True
-        time.sleep(0.5)
-    log.error(
-        "Bluetooth state fails to normalize" if state is None else
-        "Failed to match bluetooth state, current state {} expected state {}".
-        format(get_state(), state))
-    return False
-
-
-def _wait_for_passkey_match(pri_ad, sec_ad):
-    pri_pin, sec_pin = -1, 1
-    pri_variant, sec_variant = -1, 1
-    pri_pairing_req, sec_pairing_req = None, None
-    try:
-        pri_pairing_req = pri_ad.ed.pop_event(
-            event_name="BluetoothActionPairingRequest",
-            timeout=bt_default_timeout)
-        pri_variant = pri_pairing_req["data"]["PairingVariant"]
-        pri_pin = pri_pairing_req["data"]["Pin"]
-        pri_ad.log.info("Primary device received Pin: {}, Variant: {}".format(
-            pri_pin, pri_variant))
-        sec_pairing_req = sec_ad.ed.pop_event(
-            event_name="BluetoothActionPairingRequest",
-            timeout=bt_default_timeout)
-        sec_variant = sec_pairing_req["data"]["PairingVariant"]
-        sec_pin = sec_pairing_req["data"]["Pin"]
-        sec_ad.log.info("Secondary device received Pin: {}, Variant: {}".format(
-            sec_pin, sec_variant))
-    except Empty as err:
-        log.error("Wait for pin error: {}".format(err))
-        log.error("Pairing request state, Primary: {}, Secondary: {}".format(
-            pri_pairing_req, sec_pairing_req))
-        return False
-    if pri_variant == sec_variant == pairing_variant_passkey_confirmation:
-        confirmation = pri_pin == sec_pin
-        if confirmation:
-            log.info("Pairing code matched, accepting connection")
-        else:
-            log.info("Pairing code mismatched, rejecting connection")
-        pri_ad.droid.eventPost("BluetoothActionPairingRequestUserConfirm",
-                               str(confirmation))
-        sec_ad.droid.eventPost("BluetoothActionPairingRequestUserConfirm",
-                               str(confirmation))
-        if not confirmation:
-            return False
-    elif pri_variant != sec_variant:
-        log.error("Pairing variant mismatched, abort connection")
-        return False
-    return True
-
-
-def write_read_verify_data(client_ad, server_ad, msg, binary=False):
-    """Verify that the client wrote data to the server Android device correctly.
-
-    Args:
-        client_ad: the Android device to perform the write.
-        server_ad: the Android device to read the data written.
-        msg: the message to write.
-        binary: if the msg arg is binary or not.
-
-    Returns:
-        True if the data written matches the data read, false if not.
-    """
-    client_ad.log.info("Write message.")
-    try:
-        if binary:
-            client_ad.droid.bluetoothSocketConnWriteBinary(msg)
-        else:
-            client_ad.droid.bluetoothSocketConnWrite(msg)
-    except Exception as err:
-        client_ad.log.error("Failed to write data: {}".format(err))
-        return False
-    server_ad.log.info("Read message.")
-    try:
-        if binary:
-            read_msg = server_ad.droid.bluetoothSocketConnReadBinary().rstrip(
-                "\r\n")
-        else:
-            read_msg = server_ad.droid.bluetoothSocketConnRead()
-    except Exception as err:
-        server_ad.log.error("Failed to read data: {}".format(err))
-        return False
-    log.info("Verify message.")
-    if msg != read_msg:
-        log.error("Mismatch! Read: {}, Expected: {}".format(read_msg, msg))
-        return False
-    return True
-
-
-class MediaControlOverSl4a(object):
-    """Media control using sl4a facade for general purpose.
-
-    """
-
-    def __init__(self, android_device, music_file):
-        """Initialize the media_control class.
-
-        Args:
-            android_device: android_device object
-            music_file: location of the music file
-        """
-        self.android_device = android_device
-        self.music_file = music_file
-
-    def play(self):
-        """Play media.
-
-        """
-        self.android_device.droid.mediaPlayOpen('file://%s' % self.music_file,
-                                                'default', True)
-        playing = self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(playing,
-                            'Failed to play music %s' % self.music_file)
-
-    def pause(self):
-        """Pause media.
-
-        """
-        self.android_device.droid.mediaPlayPause('default')
-        paused = not self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(paused,
-                            'Failed to pause music %s' % self.music_file)
-
-    def resume(self):
-        """Resume media.
-
-        """
-        self.android_device.droid.mediaPlayStart('default')
-        playing = self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(playing,
-                            'Failed to play music %s' % self.music_file)
-
-    def stop(self):
-        """Stop media.
-
-        """
-        self.android_device.droid.mediaPlayStop('default')
-        stopped = not self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(stopped,
-                            'Failed to stop music %s' % self.music_file)
diff --git a/src/antlion/test_utils/bt/bta_lib.py b/src/antlion/test_utils/bt/bta_lib.py
deleted file mode 100644
index 60e0980..0000000
--- a/src/antlion/test_utils/bt/bta_lib.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Bluetooth adapter libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import bt_scan_mode_types
-from antlion.test_utils.bt.bt_test_utils import set_bt_scan_mode
-
-import pprint
-
-
-class BtaLib():
-    def __init__(self, log, dut, target_mac_address=None):
-        self.advertisement_list = []
-        self.dut = dut
-        self.log = log
-        self.target_mac_addr = target_mac_address
-
-    def set_target_mac_addr(self, mac_addr):
-        self.target_mac_addr = mac_addr
-
-    def set_scan_mode(self, scan_mode):
-        """Set the Scan mode of the Bluetooth Adapter"""
-        set_bt_scan_mode(self.dut, bt_scan_mode_types[scan_mode])
-
-    def set_device_name(self, line):
-        """Set Bluetooth Adapter Name"""
-        self.dut.droid.bluetoothSetLocalName(line)
-
-    def enable(self):
-        """Enable Bluetooth Adapter"""
-        self.dut.droid.bluetoothToggleState(True)
-
-    def disable(self):
-        """Disable Bluetooth Adapter"""
-        self.dut.droid.bluetoothToggleState(False)
-
-    def init_bond(self):
-        """Initiate bond to PTS device"""
-        self.dut.droid.bluetoothDiscoverAndBond(self.target_mac_addr)
-
-    def start_discovery(self):
-        """Start BR/EDR Discovery"""
-        self.dut.droid.bluetoothStartDiscovery()
-
-    def stop_discovery(self):
-        """Stop BR/EDR Discovery"""
-        self.dut.droid.bluetoothCancelDiscovery()
-
-    def get_discovered_devices(self):
-        """Get Discovered Br/EDR Devices"""
-        if self.dut.droid.bluetoothIsDiscovering():
-            self.dut.droid.bluetoothCancelDiscovery()
-        self.log.info(
-            pprint.pformat(self.dut.droid.bluetoothGetDiscoveredDevices()))
-
-    def bond(self):
-        """Bond to PTS device"""
-        self.dut.droid.bluetoothBond(self.target_mac_addr)
-
-    def disconnect(self):
-        """BTA disconnect"""
-        self.dut.droid.bluetoothDisconnectConnected(self.target_mac_addr)
-
-    def unbond(self):
-        """Unbond from PTS device"""
-        self.dut.droid.bluetoothUnbond(self.target_mac_addr)
-
-    def start_pairing_helper(self, line):
-        """Start or stop Bluetooth Pairing Helper"""
-        if line:
-            self.dut.droid.bluetoothStartPairingHelper(bool(line))
-        else:
-            self.dut.droid.bluetoothStartPairingHelper()
-
-    def push_pairing_pin(self, line):
-        """Push pairing pin to the Android Device"""
-        self.dut.droid.eventPost("BluetoothActionPairingRequestUserConfirm",
-                                 line)
-
-    def get_pairing_pin(self):
-        """Get pairing PIN"""
-        self.log.info(
-            self.dut.ed.pop_event("BluetoothActionPairingRequest", 1))
-
-    def fetch_uuids_with_sdp(self):
-        """BTA fetch UUIDS with SDP"""
-        self.log.info(
-            self.dut.droid.bluetoothFetchUuidsWithSdp(self.target_mac_addr))
-
-    def connect_profiles(self):
-        """Connect available profiles"""
-        self.dut.droid.bluetoothConnectBonded(self.target_mac_addr)
-
-    def tts_speak(self):
-        """Open audio channel by speaking characters"""
-        self.dut.droid.ttsSpeak(self.target_mac_addr)
diff --git a/src/antlion/test_utils/bt/config_lib.py b/src/antlion/test_utils/bt/config_lib.py
deleted file mode 100644
index c1db8f5..0000000
--- a/src/antlion/test_utils/bt/config_lib.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Bluetooth Config Pusher
-"""
-
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_mtu
-from antlion.test_utils.bt.bt_gatt_utils import log_gatt_server_uuids
-
-import os
-
-
-class ConfigLib():
-    bluetooth_config_path = "/system/etc/bluetooth/bt_stack.conf"
-    conf_path = "{}/configs".format(os.path.dirname(
-        os.path.realpath(__file__)))
-    reset_config_path = "{}/bt_stack.conf".format(conf_path)
-    non_bond_config_path = "{}/non_bond_bt_stack.conf".format(conf_path)
-    disable_mitm_config_path = "{}/dis_mitm_bt_stack.conf".format(conf_path)
-
-    def __init__(self, log, dut):
-        self.dut = dut
-        self.log = log
-
-    def _reset_bluetooth(self):
-        self.dut.droid.bluetoothToggleState(False)
-        self.dut.droid.bluetoothToggleState(True)
-
-    def reset(self):
-        self.dut.adb.push("{} {}".format(self.reset_config_path,
-                                         self.bluetooth_config_path))
-        self._reset_bluetooth()
-
-    def set_nonbond(self):
-        self.dut.adb.push("{} {}".format(self.non_bond_config_path,
-                                         self.bluetooth_config_path))
-        self._reset_bluetooth()
-
-    def set_disable_mitm(self):
-        self.dut.adb.push("{} {}".format(self.disable_mitm_config_path,
-                                         self.bluetooth_config_path))
-        self._reset_bluetooth()
diff --git a/src/antlion/test_utils/bt/configs/bt_stack.conf b/src/antlion/test_utils/bt/configs/bt_stack.conf
deleted file mode 100644
index 4bcf15a..0000000
--- a/src/antlion/test_utils/bt/configs/bt_stack.conf
+++ /dev/null
@@ -1,29 +0,0 @@
-# Enable trace level reconfiguration function
-# Must be present before any TRC_ trace level settings
-TraceConf=true
-
-# Trace level configuration
-#   BT_TRACE_LEVEL_NONE    0    ( No trace messages to be generated )
-#   BT_TRACE_LEVEL_ERROR   1    ( Error condition trace messages )
-#   BT_TRACE_LEVEL_WARNING 2    ( Warning condition trace messages )
-#   BT_TRACE_LEVEL_API     3    ( API traces )
-#   BT_TRACE_LEVEL_EVENT   4    ( Debug messages for events )
-#   BT_TRACE_LEVEL_DEBUG   5    ( Full debug messages )
-#   BT_TRACE_LEVEL_VERBOSE 6    ( Verbose messages ) - Currently supported for TRC_BTAPP only.
-TRC_BTM=5
-TRC_HCI=5
-TRC_L2CAP=5
-TRC_RFCOMM=5
-TRC_OBEX=5
-TRC_AVCT=5
-TRC_AVDT=5
-TRC_AVRC=5
-TRC_AVDT_SCB=5
-TRC_AVDT_CCB=5
-TRC_A2D=2
-TRC_SDP=5
-TRC_GATT=5
-TRC_SMP=5
-TRC_BTAPP=5
-TRC_BTIF=5
-
diff --git a/src/antlion/test_utils/bt/configs/dis_mitm_bt_stack.conf b/src/antlion/test_utils/bt/configs/dis_mitm_bt_stack.conf
deleted file mode 100644
index 120fc1e..0000000
--- a/src/antlion/test_utils/bt/configs/dis_mitm_bt_stack.conf
+++ /dev/null
@@ -1,30 +0,0 @@
-# Enable trace level reconfiguration function
-# Must be present before any TRC_ trace level settings
-TraceConf=true
-
-# Trace level configuration
-#   BT_TRACE_LEVEL_NONE    0    ( No trace messages to be generated )
-#   BT_TRACE_LEVEL_ERROR   1    ( Error condition trace messages )
-#   BT_TRACE_LEVEL_WARNING 2    ( Warning condition trace messages )
-#   BT_TRACE_LEVEL_API     3    ( API traces )
-#   BT_TRACE_LEVEL_EVENT   4    ( Debug messages for events )
-#   BT_TRACE_LEVEL_DEBUG   5    ( Full debug messages )
-#   BT_TRACE_LEVEL_VERBOSE 6    ( Verbose messages ) - Currently supported for TRC_BTAPP only.
-TRC_BTM=5
-TRC_HCI=5
-TRC_L2CAP=5
-TRC_RFCOMM=5
-TRC_OBEX=5
-TRC_AVCT=5
-TRC_AVDT=5
-TRC_AVRC=5
-TRC_AVDT_SCB=5
-TRC_AVDT_CCB=5
-TRC_A2D=2
-TRC_SDP=5
-TRC_GATT=5
-TRC_SMP=5
-TRC_BTAPP=5
-TRC_BTIF=5
-
-PTS_SmpOptions=0x9,0x4,0xf,0xf,0x10
diff --git a/src/antlion/test_utils/bt/configs/non_bond_bt_stack.conf b/src/antlion/test_utils/bt/configs/non_bond_bt_stack.conf
deleted file mode 100644
index 3dedf7e..0000000
--- a/src/antlion/test_utils/bt/configs/non_bond_bt_stack.conf
+++ /dev/null
@@ -1,30 +0,0 @@
-# Enable trace level reconfiguration function
-# Must be present before any TRC_ trace level settings
-TraceConf=true
-
-# Trace level configuration
-#   BT_TRACE_LEVEL_NONE    0    ( No trace messages to be generated )
-#   BT_TRACE_LEVEL_ERROR   1    ( Error condition trace messages )
-#   BT_TRACE_LEVEL_WARNING 2    ( Warning condition trace messages )
-#   BT_TRACE_LEVEL_API     3    ( API traces )
-#   BT_TRACE_LEVEL_EVENT   4    ( Debug messages for events )
-#   BT_TRACE_LEVEL_DEBUG   5    ( Full debug messages )
-#   BT_TRACE_LEVEL_VERBOSE 6    ( Verbose messages ) - Currently supported for TRC_BTAPP only.
-TRC_BTM=5
-TRC_HCI=5
-TRC_L2CAP=5
-TRC_RFCOMM=5
-TRC_OBEX=5
-TRC_AVCT=5
-TRC_AVDT=5
-TRC_AVRC=5
-TRC_AVDT_SCB=5
-TRC_AVDT_CCB=5
-TRC_A2D=2
-TRC_SDP=5
-TRC_GATT=5
-TRC_SMP=5
-TRC_BTAPP=5
-TRC_BTIF=5
-
-PTS_SmpOptions=0xC,0x4,0xf,0xf,0x10
diff --git a/src/antlion/test_utils/bt/gatt_test_database.py b/src/antlion/test_utils/bt/gatt_test_database.py
deleted file mode 100644
index 6c452cd..0000000
--- a/src/antlion/test_utils/bt/gatt_test_database.py
+++ /dev/null
@@ -1,1705 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_char_types
-from antlion.test_utils.bt.bt_constants import gatt_characteristic_value_format
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-
-STRING_512BYTES = '''
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-111112222233
-'''
-STRING_50BYTES = '''
-11111222223333344444555556666677777888889999900000
-'''
-STRING_25BYTES = '''
-1111122222333334444455555
-'''
-
-INVALID_SMALL_DATABASE = {
-    'services': [{
-        'uuid': '00001800-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': gatt_char_types['device_name'],
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0003,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database'
-        }, {
-            'uuid': gatt_char_types['appearance'],
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0005,
-            'value_type': gatt_characteristic_value_format['sint32'],
-            'offset': 0,
-            'value': 17
-        }, {
-            'uuid': gatt_char_types['peripheral_pref_conn'],
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0007
-        }]
-    }, {
-        'uuid': '00001801-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': gatt_char_types['service_changed'],
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'instance_id': 0x0012,
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x0000],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }, {
-            'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0015,
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x04]
-        }]
-    }]
-}
-
-# Corresponds to the PTS defined LARGE_DB_1
-LARGE_DB_1 = {
-    'services': [
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 7,
-            'characteristics': [{
-                'uuid': '0000b008-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'] |
-                gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x08],
-                'descriptors': [{
-                    'uuid': '0000b015-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                }, {
-                    'uuid': '0000b016-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                }, {
-                    'uuid': '0000b017-0000-1000-8000-00805f9b34fb',
-                    'permissions':
-                    gatt_characteristic['permission_read_encrypted_mitm'],
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00d-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['secondary'],
-            'handles': 6,
-            'characteristics': [{
-                'uuid': '0000b00c-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0C],
-            }, {
-                'uuid': '0000b00b-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0B],
-            }]
-        },
-        {
-            'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 10,
-            'characteristics': [{
-                'uuid': '0000b001-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x01],
-            }, {
-                'uuid': '0000b002-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            }, {
-                'uuid': '0000b004-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            }, {
-                'uuid': '0000b002-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '11111222223333344444555556666677777888889999900000',
-            }, {
-                'uuid': '0000b003-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x03],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 3,
-            'characteristics': [{
-                'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x07],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 3,
-            'characteristics': [{
-                'uuid': '0000b006-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'] |
-                gatt_characteristic['property_write_no_response'] |
-                gatt_characteristic['property_notify'] |
-                gatt_characteristic['property_indicate'],
-                'permissions': gatt_characteristic['permission_write'] |
-                gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x06],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 12,
-            'characteristics': [
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_write'] |
-                    gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_write'] |
-                    gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['server_char_cfg'],
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': gatt_descriptor['disable_notification_value']
-                    }]
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': 0x0,
-                    'permissions': 0x0,
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [{
-                        'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [
-                            0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88,
-                            0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                            0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44,
-                            0x55, 0x66, 0x77, 0x88, 0x99, 0x00, 0x11, 0x22,
-                            0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                            0x11, 0x22, 0x33
-                        ]
-                    }]
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [{
-                        'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [
-                            0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88,
-                            0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                            0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44,
-                            0x55, 0x66, 0x77, 0x88, 0x99, 0x00, 0x11, 0x22,
-                            0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                            0x11, 0x22, 0x33
-                        ]
-                    }]
-                },
-            ]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 7,
-            'characteristics': [{
-                'uuid': '0000b005-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_write'] |
-                gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_write'] |
-                gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x05],
-                'descriptors': [{
-                    'uuid': gatt_char_desc_uuids['char_ext_props'],
-                    'permissions': gatt_descriptor['permission_read'],
-                    'value': [0x03, 0x00]
-                }, {
-                    'uuid': gatt_char_desc_uuids['char_user_desc'],
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x65, 0x66, 0x67, 0x68, 0x69, 0x70, 0x71, 0x72, 0x73,
-                        0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x80, 0x81, 0x82,
-                        0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x90
-                    ]
-                }, {
-                    'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                    'permissions':
-                    gatt_descriptor['permission_read_encrypted_mitm'],
-                    'value': [0x00, 0x01, 0x30, 0x01, 0x11, 0x31]
-                }, {
-                    'uuid': '0000d5d4-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_read'],
-                    'value': [0x44]
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'handles': 7,
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'enforce_initial_attribute_length': True,
-                'properties': gatt_characteristic['property_write'] |
-                gatt_characteristic['property_extended_props'] |
-                gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_write'] |
-                gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x09],
-                'descriptors': [{
-                    'uuid': gatt_char_desc_uuids['char_ext_props'],
-                    'permissions': gatt_descriptor['permission_read'],
-                    'value': gatt_descriptor['enable_notification_value']
-                }, {
-                    'uuid': '0000d9d2-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [0x22]
-                }, {
-                    'uuid': '0000d9d3-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_write'],
-                    'value': [0x33]
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00f-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 18,
-            'characteristics': [
-                {
-                    'uuid': '0000b00e-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': "Length is ",
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x19, 0x00, 0x00, 0x30, 0x01, 0x00, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b00f-0000-1000-8000-00805f9b34fb',
-                    'enforce_initial_attribute_length': True,
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x65],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x04, 0x00, 0x01, 0x27, 0x01, 0x01, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b006-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x34, 0x12],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x06, 0x00, 0x10, 0x27, 0x01, 0x02, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                    'enforce_initial_attribute_length': True,
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04, 0x03, 0x02, 0x01],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x08, 0x00, 0x17, 0x27, 0x01, 0x03, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b010-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x65, 0x34, 0x12, 0x04, 0x03, 0x02, 0x01],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_agreg_fmt'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0xa6, 0x00, 0xa9, 0x00, 0xac, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b011-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['write_type_signed']
-                    |  #for some reason 0x40 is not working...
-                    gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x12]
-                }
-            ]
-        },
-        {
-            'uuid': '0000a00c-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 30,
-            'characteristics': [{
-                'uuid': '0000b00a-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0a],
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "111112222233333444445",
-                'descriptors': [{
-                    'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x11
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "2222233333444445555566",
-                'descriptors': [{
-                    'uuid': '0000b013-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x11, 0x22
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "33333444445555566666777",
-                'descriptors': [{
-                    'uuid': '0000b014-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x11, 0x22, 0x33
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33
-                ],
-                'descriptors': [{
-                    'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44
-                ],
-                'descriptors': [{
-                    'uuid': '0000b013-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55
-                ],
-                'descriptors': [{
-                    'uuid': '0000b014-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "1111122222333334444455555666667777788888999",
-                'descriptors': [{
-                    'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "22222333334444455555666667777788888999990000",
-                'descriptors': [{
-                    'uuid': '0000b013-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "333334444455555666667777788888999990000011111",
-                'descriptors': [{
-                    'uuid': '0000b014-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55
-                    ]
-                }]
-            }]
-        },
-    ]
-}
-
-# Corresponds to the PTS defined LARGE_DB_2
-LARGE_DB_2 = {
-    'services': [
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abdcef',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b00a-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0003,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x04],
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0005,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '111112222233333444445',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0007,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '2222233333444445555566',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0009,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '33333444445555566666777',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x000b,
-                'properties': 0x0a0,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '1111122222333334444455555666667777788888999',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x000d,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '22222333334444455555666667777788888999990000',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x000f,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '333334444455555666667777788888999990000011111',
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'handles': 5,
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0023,
-                'properties': 0x8a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x09],
-                'descriptors': [{
-                    'uuid': '0000d9d2-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [0x22]
-                }, {
-                    'uuid': '0000d9d3-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_write'],
-                    'value': [0x33]
-                }, {
-                    'uuid': gatt_char_desc_uuids['char_ext_props'],
-                    'permissions': gatt_descriptor['permission_write'],
-                    'value': gatt_descriptor['enable_notification_value']
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0012,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x04],
-            }]
-        },
-    ]
-}
-
-DB_TEST = {
-    'services': [{
-        'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-            'properties': 0x02 | 0x08,
-            'permissions': 0x10 | 0x01,
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x01],
-            'enforce_initial_attribute_length': True,
-            'descriptors': [{
-                'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-                'value': [0x01] * 30
-            }]
-        }, ]
-    }]
-}
-
-PTS_TEST2 = {
-    'services': [{
-        'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [
-            {
-                'uuid': '000018ba-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000060aa-0000-0000-0123-456789abcdef',
-                'properties': 0x02,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x20,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000004d5e-0000-1000-8000-00805f9b34fb',
-                'properties': 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000001b44-0000-1000-8000-00805f9b34fb',
-                'properties': 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000006b98-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08 | 0x10 | 0x04,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x00,
-                'permissions': 0x00,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000d62-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08 | 0x80,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000002e85-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000004a64-0000-0000-0123-456789abcdef',
-                'properties': 0x02 | 0x08 | 0x80,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000005b4a-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000001c81-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000006b98-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000001b44-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '0000014dd-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000008f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000002aad-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000002ab0-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000002ab3-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-        ]
-    }]
-}
-
-PTS_TEST = {
-    'services': [{
-        'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [
-            {
-                'uuid': '000018ba-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_25BYTES,
-            },
-            {
-                'uuid': '000060aa-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_25BYTES,
-            },
-        ]
-    }]
-}
-
-# Corresponds to the PTS defined LARGE_DB_3
-LARGE_DB_3 = {
-    'services': [
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x0003,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x0013,
-                    'properties': 0x10,
-                    'permissions': 0x17,
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [
-                        {
-                            'uuid': gatt_char_desc_uuids['char_ext_props'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x09]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_user_desc'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['client_char_cfg'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x01, 0x00]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['server_char_cfg'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_agreg_fmt'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_valid_range'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid':
-                            gatt_char_desc_uuids['external_report_reference'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['report_reference'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                    ]
-                },
-                {
-                    'uuid': gatt_char_types['service_changed'],
-                    'instance_id': 0x0023,
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['appearance'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['peripheral_priv_flag'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['reconnection_address'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['system_id'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['model_number_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['serial_number_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['firmware_revision_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['hardware_revision_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['software_revision_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['manufacturer_name_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['pnp_id'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-            ]
-        },
-        {
-            'uuid': '0000a00d-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['secondary'],
-            'handles': 5,
-            'characteristics': [{
-                'uuid': '0000b00c-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0023,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0c],
-            }, {
-                'uuid': '0000b00b-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0025,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0b],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b008-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0032,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x08],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0042,
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x07],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b006-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0052,
-                'properties': 0x3e,
-                'permissions': gatt_characteristic['permission_write_encrypted_mitm'] |
-                gatt_characteristic['permission_read_encrypted_mitm'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x06],
-            }]
-        },
-        {
-            'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 10,
-            'characteristics': [{
-                'uuid': '0000b001-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0074,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x01],
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'enforce_initial_attribute_length': True,
-                'instance_id': 0x0076,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '11111222223333344444555556666677777888889999900000',
-            }, {
-                'uuid': '0000b003-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0078,
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x03],
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'handles': 10,
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0082,
-                'properties': 0x8a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x09],
-                'descriptors': [
-                    {
-                        'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [0x09]
-                    },
-                    {
-                        'uuid': '0000d9d2-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [0x22]
-                    },
-                    {
-                        'uuid': gatt_char_desc_uuids['char_ext_props'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x01, 0x00]
-                    },
-                    {
-                        'uuid': '0000d9d3-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_write'],
-                        'value': [0x22]
-                    },
-                ]
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0092,
-                'properties': 0x8a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x05],
-                'descriptors': [
-                    {
-                        'uuid': gatt_char_desc_uuids['char_user_desc'],
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [0] * 26
-                    },
-                    {
-                        'uuid': gatt_char_desc_uuids['char_ext_props'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x03, 0x00]
-                    },
-                    {
-                        'uuid': '0000d5d4-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x44]
-                    },
-                    {
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x04, 0x00, 0x01, 0x30, 0x01, 0x11, 0x31]
-                    },
-                ]
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'characteristics': [
-                {
-                    'uuid': '0000b00a-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a2,
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x0a],
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a4,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '111112222233333444445',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a6,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '2222233333444445555566',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a8,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '33333444445555566666777',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00aa,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '1111122222333334444455555666667777788888999',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00ac,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '22222333334444455555666667777788888999990000',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00ae,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-            ]
-        },
-        {
-            'uuid': '0000a00e-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b00d-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0xffff,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0d],
-            }]
-        },
-    ]
-}
-
-TEST_DB_1 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'],
-                'value': [0x01]
-            }]
-        }]
-    }]
-}
-
-TEST_DB_2 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions':
-            gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-        }, {
-            'uuid': '00002a30-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions':
-            gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002b,
-        }]
-    }]
-}
-
-TEST_DB_3 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'],
-                'value': [0x01]
-            }, {
-                'uuid': '00002a20-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-                'instance_id': 0x002c,
-                'value': [0x01]
-            }]
-        }, {
-            'uuid': '00002a30-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002b,
-        }]
-    }]
-}
-
-TEST_DB_4 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': "test",
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions':
-                gatt_descriptor['permission_read_encrypted_mitm'],
-                'value': [0] * 512
-            }]
-        }]
-    }]
-}
-
-TEST_DB_5 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': 'b2c83efa-34ca-11e6-ac61-9e71128cae77',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x1],
-            'instance_id': 0x002c,
-            'descriptors': [{
-                'uuid': '00002902-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }]
-    }]
-}
-
-TEST_DB_6 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] | gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': '00002a19-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-                'value': [0x01] * 30
-            }]
-        }]
-    }]
-}
-
-SIMPLE_READ_DESCRIPTOR = {
-    'services': [{
-        'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': 'aa7edd5a-4d1d-4f0e-883a-d145616a1630',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x002a,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }]
-    }]
-}
-
-CHARACTERISTIC_PROPERTY_WRITE_NO_RESPONSE = {
-    'services': [{
-        'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': 'aa7edd5a-4d1d-4f0e-883a-d145616a1630',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'] |
-            gatt_characteristic['permission_read'],
-            'instance_id': 0x0042,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database'
-        }, {
-            'uuid': 'aa7edd6a-4d1d-4f0e-883a-d145616a1630',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'] |
-            gatt_characteristic['permission_read'],
-            'instance_id': 0x004d,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database'
-        }]
-    }]
-}
-
-GATT_SERVER_DB_MAPPING = {
-    'LARGE_DB_1': LARGE_DB_1,
-    'LARGE_DB_3': LARGE_DB_3,
-    'INVALID_SMALL_DATABASE': INVALID_SMALL_DATABASE,
-    'SIMPLE_READ_DESCRIPTOR': SIMPLE_READ_DESCRIPTOR,
-    'CHARACTERISTIC_PROPERTY_WRITE_NO_RESPONSE':
-    CHARACTERISTIC_PROPERTY_WRITE_NO_RESPONSE,
-    'TEST_DB_1': TEST_DB_1,
-    'TEST_DB_2': TEST_DB_2,
-    'TEST_DB_3': TEST_DB_3,
-    'TEST_DB_4': TEST_DB_4,
-    'TEST_DB_5': TEST_DB_5,
-    'LARGE_DB_3_PLUS': LARGE_DB_3,
-    'DB_TEST': DB_TEST,
-    'PTS_TEST': PTS_TEST,
-    'PTS_TEST2': PTS_TEST2,
-    'TEST_DB_6': TEST_DB_6,
-}
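
For context, a test selects one of these databases by name through GATT_SERVER_DB_MAPPING and hands the resulting dictionary to the GATT server helper library. Below is a minimal, illustrative sketch of that lookup; it assumes the pre-removal module path shown in this diff, uses only the keys defined above ('services', 'uuid', 'characteristics'), and the summarize_database helper is hypothetical, not part of antlion.

    # Illustrative sketch only; gatt_test_database is one of the modules removed here.
    from antlion.test_utils.bt.gatt_test_database import GATT_SERVER_DB_MAPPING

    def summarize_database(name: str) -> None:
        """Print the services and characteristics defined by a named test database."""
        database = GATT_SERVER_DB_MAPPING[name]
        for service in database["services"]:
            characteristics = service["characteristics"]
            print(f"service {service['uuid']}: {len(characteristics)} characteristic(s)")
            for characteristic in characteristics:
                print(f"  characteristic {characteristic['uuid']}")

    summarize_database("TEST_DB_1")
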
diff --git a/src/antlion/test_utils/bt/gattc_lib.py b/src/antlion/test_utils/bt/gattc_lib.py
deleted file mode 100644
index efd0c4a..0000000
--- a/src/antlion/test_utils/bt/gattc_lib.py
+++ /dev/null
@@ -1,575 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-GATT Client Libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import default_le_connection_interval_ms
-from antlion.test_utils.bt.bt_constants import default_bluetooth_socket_timeout_ms
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_mtu
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.bt.bt_constants import le_default_supervision_timeout
-from antlion.test_utils.bt.bt_constants import le_connection_interval_time_step_ms
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_gatt_utils import log_gatt_server_uuids
-
-import time
-
-
-class GattClientLib():
-    def __init__(self, log, dut, target_mac_addr=None):
-        self.dut = dut
-        self.log = log
-        self.gatt_callback = None
-        self.bluetooth_gatt = None
-        self.discovered_services_index = None
-        self.target_mac_addr = target_mac_addr
-        self.generic_uuid = "0000{}-0000-1000-8000-00805f9b34fb"
-
-    def set_target_mac_addr(self, mac_addr):
-        self.target_mac_addr = mac_addr
-
-    def connect_over_le_based_off_name(self, autoconnect, name):
-        """Perform GATT connection over LE"""
-        self.dut.droid.bleSetScanSettingsScanMode(
-            ble_scan_settings_modes['low_latency'])
-        filter_list = self.dut.droid.bleGenFilterList()
-        scan_settings = self.dut.droid.bleBuildScanSetting()
-        scan_callback = self.dut.droid.bleGenScanCallback()
-        event_name = scan_result.format(scan_callback)
-        self.dut.droid.bleSetScanFilterDeviceName("BLE Rect")
-        self.dut.droid.bleBuildScanFilter(filter_list)
-        self.dut.droid.bleStartBleScan(filter_list, scan_settings,
-                                       scan_callback)
-
-        try:
-            event = self.dut.ed.pop_event(event_name, 10)
-            self.log.info("Found scan result: {}".format(event))
-        except Exception:
-            self.log.info("Didn't find any scan results.")
-            raise
-        mac_addr = event['data']['Result']['deviceInfo']['address']
-        self.bluetooth_gatt, self.gatt_callback = setup_gatt_connection(
-            self.dut, mac_addr, autoconnect, transport=gatt_transport['le'])
-        self.dut.droid.bleStopBleScan(scan_callback)
-        self.discovered_services_index = None
-
-    def connect_over_le(self, autoconnect):
-        """Perform GATT connection over LE"""
-        self.bluetooth_gatt, self.gatt_callback = setup_gatt_connection(
-            self.dut,
-            self.target_mac_addr,
-            autoconnect,
-            transport=gatt_transport['le'])
-        self.discovered_services_index = None
-
-    def connect_over_bredr(self):
-        """Perform GATT connection over BREDR"""
-        self.bluetooth_gatt, self.gatt_callback = setup_gatt_connection(
-            self.dut,
-            self.target_mac_addr,
-            False,
-            transport=gatt_transport['bredr'])
-
-    def disconnect(self):
-        """Perform GATT disconnect"""
-        cmd = "Disconnect GATT connection"
-        try:
-            disconnect_gatt_connection(self.dut, self.bluetooth_gatt,
-                                       self.gatt_callback)
-        except Exception as err:
-            self.log.info("Cmd {} failed with {}".format(cmd, err))
-        try:
-            self.dut.droid.gattClientClose(self.bluetooth_gatt)
-        except Exception as err:
-            self.log.info("Cmd failed with {}".format(err))
-
-    def _setup_discovered_services_index(self):
-        if not self.discovered_services_index:
-            self.dut.droid.gattClientDiscoverServices(self.bluetooth_gatt)
-            expected_event = gatt_cb_strings['gatt_serv_disc'].format(
-                self.gatt_callback)
-            event = self.dut.ed.pop_event(expected_event, 10)
-            self.discovered_services_index = event['data']['ServicesIndex']
-
-    def read_char_by_uuid(self, line):
-        """GATT client read Characteristic by UUID."""
-        uuid = line
-        if len(line) == 4:
-            uuid = self.generic_uuid.format(line)
-        self.dut.droid.gattClientReadUsingCharacteristicUuid(
-            self.bluetooth_gatt, uuid, 0x0001, 0xFFFF)
-
-    def request_mtu(self, mtu):
-        """Request MTU Change of input value"""
-        setup_gatt_mtu(self.dut, self.bluetooth_gatt, self.gatt_callback,
-                       int(mtu))
-
-    def list_all_uuids(self):
-        """From the GATT Client, discover services and list all services,
-        chars and descriptors
-        """
-        self._setup_discovered_services_index()
-        log_gatt_server_uuids(self.dut, self.discovered_services_index,
-                              self.bluetooth_gatt)
-
-    def discover_services(self):
-        """GATT Client discover services of GATT Server"""
-        self.dut.droid.gattClientDiscoverServices(self.bluetooth_gatt)
-
-    def refresh(self):
-        """Perform Gatt Client Refresh"""
-        self.dut.droid.gattClientRefresh(self.bluetooth_gatt)
-
-    def read_char_by_instance_id(self, id):
-        """From the GATT Client, discover services and list all services,
-        chars and descriptors
-        """
-        if not id:
-            self.log.info("Invalid id")
-            return
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index, int(id, 16))
-
-    def write_char_by_instance_id(self, line):
-        """GATT Client Write to Characteristic by instance ID"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_char_by_instance_id_value(self, line):
-        """GATT Client Write to Characteristic by instance ID"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        write_value = args[1]
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), [int(write_value)])
-
-    def mod_write_char_by_instance_id(self, line):
-        """GATT Client Write to Char that doesn't have write permission"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndWriteCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_invalid_char_by_instance_id(self, line):
-        """GATT Client Write to Char that doesn't exists"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteInvalidCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def mod_read_char_by_instance_id(self, line):
-        """GATT Client Read Char that doesn't have write permission"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndReadCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def read_invalid_char_by_instance_id(self, line):
-        """GATT Client Read Char that doesn't exists"""
-        instance_id = line
-        self.dut.droid.gattClientReadInvalidCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def mod_write_desc_by_instance_id(self, line):
-        """GATT Client Write to Desc that doesn't have write permission"""
-        cmd = ""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndWriteDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_invalid_desc_by_instance_id(self, line):
-        """GATT Client Write to Desc that doesn't exists"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteInvalidDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def mod_read_desc_by_instance_id(self, line):
-        """GATT Client Read Desc that doesn't have write permission"""
-        cmd = ""
-        instance_id = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndReadDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def read_invalid_desc_by_instance_id(self, line):
-        """GATT Client Read Desc that doesn't exists"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadInvalidDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def mod_read_char_by_uuid_and_instance_id(self, line):
-        """GATT Client Read Char that doesn't have write permission"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [uuid] [instance_id]")
-            return
-        uuid = args[0]
-        instance_id = args[1]
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndReadCharacteristicByUuidAndInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), self.generic_uuid.format(uuid))
-
-    def read_invalid_char_by_uuid(self, line):
-        """GATT Client Read Char that doesn't exists"""
-        uuid = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadInvalidCharacteristicByUuid(
-            self.bluetooth_gatt, self.discovered_services_index,
-            self.generic_uuid.format(uuid))
-
-    def write_desc_by_instance_id(self, line):
-        """GATT Client Write to Descriptor by instance ID"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [instanceID] [size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_desc_notification_by_instance_id(self, line):
-        """GATT Client Write to Descriptor by instance ID"""
-        args = line.split()
-        instance_id = args[0]
-        switch = int(args[1])
-        write_value = [0x00, 0x00]
-        if switch == 2:
-            write_value = [0x02, 0x00]
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def enable_notification_desc_by_instance_id(self, line):
-        """GATT Client Enable Notification on Descriptor by instance ID"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    desc_inst_id = self.dut.droid.gattClientGetDescriptorInstanceId(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, k)
-                    if desc_inst_id == int(instance_id, 16):
-                        self.dut.droid.gattClientDescriptorSetValueByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k,
-                            gatt_descriptor['enable_notification_value'])
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientWriteDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientSetCharacteristicNotificationByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, True)
-
-    def enable_indication_desc_by_instance_id(self, line):
-        """GATT Client Enable indication on Descriptor by instance ID"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    desc_inst_id = self.dut.droid.gattClientGetDescriptorInstanceId(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, k)
-                    if desc_inst_id == int(instance_id, 16):
-                        self.dut.droid.gattClientDescriptorSetValueByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k,
-                            gatt_descriptor['enable_indication_value'])
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientWriteDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientSetCharacteristicNotificationByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, True)
-
-    def char_enable_all_notifications(self):
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                self.dut.droid.gattClientSetCharacteristicNotificationByIndex(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j,
-                    True)
-
-    def read_char_by_invalid_instance_id(self, line):
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadInvalidCharacteristicInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index, 0,
-            int(line, 16))
-
-    def begin_reliable_write(self):
-        """Begin a reliable write on the Bluetooth Gatt Client"""
-        self.dut.droid.gattClientBeginReliableWrite(self.bluetooth_gatt)
-
-    def abort_reliable_write(self):
-        """Abort a reliable write on the Bluetooth Gatt Client"""
-        self.dut.droid.gattClientAbortReliableWrite(self.bluetooth_gatt)
-
-    def execute_reliable_write(self):
-        """Execute a reliable write on the Bluetooth Gatt Client"""
-        self.dut.droid.gattExecuteReliableWrite(self.bluetooth_gatt)
-
-    def read_all_char(self):
-        """GATT Client read all Characteristic values"""
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                char_inst_id = self.dut.droid.gattClientGetCharacteristicInstanceId(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j)
-                self.log.info("Reading characteristic {} {}".format(
-                    hex(char_inst_id), characteristic_uuids[j]))
-                self.dut.droid.gattClientReadCharacteristicByIndex(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j)
-                time.sleep(1)  # Necessary for PTS
-
-    def read_all_desc(self):
-        """GATT Client read all Descriptor values"""
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    time.sleep(1)
-                    try:
-                        self.log.info("Reading descriptor {}".format(
-                            descriptor_uuids[k]))
-                        self.dut.droid.gattClientReadDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                    except Exception as err:
-                        self.log.info(
-                            "Failed to read descriptor {}: {}".format(
-                                descriptor_uuids[k], err))
-
-    def write_all_char(self, line):
-        """Write to every Characteristic on the GATT server"""
-        write_value = []
-        for i in range(int(line)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                char_inst_id = self.dut.droid.gattClientGetCharacteristicInstanceId(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j)
-                self.log.info("Writing to {} {}".format(
-                    hex(char_inst_id), characteristic_uuids[j]))
-                try:
-                    self.dut.droid.gattClientCharacteristicSetValueByIndex(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, write_value)
-                    self.dut.droid.gattClientWriteCharacteristicByIndex(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j)
-                    time.sleep(1)
-                except Exception as err:
-                    self.log.info(
-                        "Failed to write to characteristic {}: {}".format(
-                            characteristic_uuids[j], err))
-
-    def write_all_desc(self, line):
-        """ Write to every Descriptor on the GATT server """
-        args = line.split()
-        write_value = []
-        for i in range(int(line)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    time.sleep(1)
-                    desc_inst_id = self.dut.droid.gattClientGetDescriptorInstanceId(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, k)
-                    self.log.info("Writing to {} {}".format(
-                        hex(desc_inst_id), descriptor_uuids[k]))
-                    try:
-                        self.dut.droid.gattClientDescriptorSetValueByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k,
-                            write_value)
-                        self.dut.droid.gattClientWriteDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                    except Exception as err:
-                        self.log.info(
-                            "Failed to write to descriptor {}: {}".format(
-                                descriptor_uuids[k], err))
-
-    def discover_service_by_uuid(self, line):
-        """ Discover service by UUID """
-        uuid = line
-        if len(line) == 4:
-            uuid = self.generic_uuid.format(line)
-        self.dut.droid.gattClientDiscoverServiceByUuid(self.bluetooth_gatt,
-                                                       uuid)
-
-    def request_le_connection_parameters(self):
-        le_min_ce_len = 0
-        le_max_ce_len = 0
-        le_connection_interval = 0
-        minInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-        maxInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-        return_status = self.dut.droid.gattClientRequestLeConnectionParameters(
-            self.bluetooth_gatt, minInterval, maxInterval, 0,
-            le_default_supervision_timeout, le_min_ce_len, le_max_ce_len)
-        self.log.info(
-            "Result of request le connection param: {}".format(return_status))
-
-    def socket_conn_begin_connect_thread_psm(self, line):
-        args = line.split()
-        is_ble = bool(int(args[0]))
-        secured_conn = bool(int(args[1]))
-        psm_value = int(args[2])  # 1
-        self.dut.droid.bluetoothSocketConnBeginConnectThreadPsm(
-            self.target_mac_addr, is_ble, psm_value, secured_conn)
-
-    def socket_conn_begin_accept_thread_psm(self, line):
-        accept_timeout_ms = default_bluetooth_socket_timeout_ms
-        is_ble = True
-        secured_conn = False
-        self.dut.droid.bluetoothSocketConnBeginAcceptThreadPsm(
-            accept_timeout_ms, is_ble, secured_conn)
diff --git a/src/antlion/test_utils/bt/gatts_lib.py b/src/antlion/test_utils/bt/gatts_lib.py
deleted file mode 100644
index e7828d0..0000000
--- a/src/antlion/test_utils/bt/gatts_lib.py
+++ /dev/null
@@ -1,380 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion.keys import Config
-from antlion.utils import rand_ascii_str
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_characteristic_value_format
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import gatt_server_responses
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import small_timeout
-from antlion.test_utils.bt.gatt_test_database import STRING_512BYTES
-
-from antlion.utils import exe_cmd
-from math import ceil
-
-
-class GattServerLib():
-
-    characteristic_list = []
-    default_timeout = 10
-    descriptor_list = []
-    dut = None
-    gatt_server = None
-    gatt_server_callback = None
-    gatt_server_list = []
-    log = None
-    service_list = []
-    write_mapping = {}
-
-    def __init__(self, log, dut):
-        self.dut = dut
-        self.log = log
-
-    def list_all_uuids(self):
-        """From the GATT Client, discover services and list all services,
-        chars and descriptors.
-        """
-        self.log.info("Service List:")
-        for service in self.dut.droid.gattGetServiceUuidList(self.gatt_server):
-            self.dut.log.info("GATT Server service uuid: {}".format(service))
-        self.log.info("Characteristics List:")
-        for characteristic in self.characteristic_list:
-            instance_id = self.dut.droid.gattServerGetCharacteristicInstanceId(
-                characteristic)
-            uuid = self.dut.droid.gattServerGetCharacteristicUuid(
-                characteristic)
-            self.dut.log.info(
-                "GATT Server characteristic handle uuid: {} {}".format(
-                    hex(instance_id), uuid))
-        # TODO: add getting instance ids and uuids from each descriptor.
-
-    def open(self):
-        """Open an empty GATT Server instance"""
-        self.gatt_server_callback = self.dut.droid.gattServerCreateGattServerCallback(
-        )
-        self.gatt_server = self.dut.droid.gattServerOpenGattServer(
-            self.gatt_server_callback)
-        self.gatt_server_list.append(self.gatt_server)
-
-    def clear_services(self):
-        """Clear BluetoothGattServices from BluetoothGattServer"""
-        self.dut.droid.gattServerClearServices(self.gatt_server)
-
-    def close_bluetooth_gatt_servers(self):
-        """Close Bluetooth Gatt Servers"""
-        try:
-            for btgs in self.gatt_server_list:
-                self.dut.droid.gattServerClose(btgs)
-        except Exception as err:
-            self.log.error(
-                "Failed to close Bluetooth GATT Servers: {}".format(err))
-        self.characteristic_list = []
-        self.descriptor_list = []
-        self.gatt_server_list = []
-        self.service_list = []
-
-    def characteristic_set_value_by_instance_id(self, instance_id, value):
-        """Set Characteristic value by instance id"""
-        self.dut.droid.gattServerCharacteristicSetValueByInstanceId(
-            int(instance_id, 16), value)
-
-    def notify_characteristic_changed(self, instance_id, confirm):
-        """ Notify characteristic changed """
-        self.dut.droid.gattServerNotifyCharacteristicChangedByInstanceId(
-            self.gatt_server, 0, int(instance_id, 16), confirm)
-
-    def send_response(self, user_input):
-        """Send a single response to the GATT Client"""
-        args = user_input.split()
-        mtu = 23
-        if len(args) == 2:
-            user_input = args[0]
-            mtu = int(args[1])
-        desc_read = gatt_event['desc_read_req']['evt'].format(
-            self.gatt_server_callback)
-        desc_write = gatt_event['desc_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_read = gatt_event['char_read_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write_req = gatt_event['char_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write = gatt_event['char_write']['evt'].format(
-            self.gatt_server_callback)
-        execute_write = gatt_event['exec_write']['evt'].format(
-            self.gatt_server_callback)
-        regex = "({}|{}|{}|{}|{}|{})".format(desc_read, desc_write, char_read,
-                                             char_write, execute_write,
-                                             char_write_req)
-        events = self.dut.ed.pop_events(regex, 5, small_timeout)
-        status = 0
-        if user_input:
-            status = gatt_server_responses.get(user_input)
-        for event in events:
-            self.log.debug("Found event: {}.".format(event))
-            request_id = event['data']['requestId']
-            if event['name'] == execute_write:
-                if ('execute' in event['data']
-                        and event['data']['execute'] == True):
-                    for key in self.write_mapping:
-                        value = self.write_mapping[key]
-                        self.log.info("Writing key, value: {}, {}".format(
-                            key, value))
-                        self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                            key, value)
-                else:
-                    self.log.info("Execute result is false")
-                self.write_mapping = {}
-                self.dut.droid.gattServerSendResponse(self.gatt_server, 0,
-                                                      request_id, status, 0,
-                                                      [])
-                continue
-            offset = event['data']['offset']
-            instance_id = event['data']['instanceId']
-            if (event['name'] == desc_write or event['name'] == char_write
-                    or event['name'] == char_write_req):
-                if ('preparedWrite' in event['data']
-                        and event['data']['preparedWrite'] == True):
-                    value = event['data']['value']
-                    if instance_id in self.write_mapping.keys():
-                        self.write_mapping[instance_id] = self.write_mapping[
-                            instance_id] + value
-                        self.log.info(
-                            "New Prepared Write Value for {}: {}".format(
-                                instance_id, self.write_mapping[instance_id]))
-                    else:
-                        self.log.info("write mapping key, value {}, {}".format(
-                            instance_id, value))
-                        self.write_mapping[instance_id] = value
-                        self.log.info("current value {}, {}".format(
-                            instance_id, value))
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, 0, value)
-                    continue
-                else:
-                    self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                        event['data']['instanceId'], event['data']['value'])
-
-            try:
-                data = self.dut.droid.gattServerGetReadValueByInstanceId(
-                    int(event['data']['instanceId']))
-            except Exception as err:
-                self.log.error(err)
-            if not data:
-                data = [1]
-            self.log.info(
-                "GATT Server Send Response [request_id, status, offset, data]" \
-                " [{}, {}, {}, {}]".
-                format(request_id, status, offset, data))
-            data = data[offset:offset + mtu - 1]
-            self.dut.droid.gattServerSendResponse(self.gatt_server, 0,
-                                                  request_id, status, offset,
-                                                  data)
-
-    def _setup_service(self, serv):
-        service = self.dut.droid.gattServerCreateService(
-            serv['uuid'], serv['type'])
-        if 'handles' in serv:
-            self.dut.droid.gattServerServiceSetHandlesToReserve(
-                service, serv['handles'])
-        return service
-
-    def _setup_characteristic(self, char):
-        characteristic = \
-            self.dut.droid.gattServerCreateBluetoothGattCharacteristic(
-                char['uuid'], char['properties'], char['permissions'])
-        if 'instance_id' in char:
-            self.dut.droid.gattServerCharacteristicSetInstanceId(
-                characteristic, char['instance_id'])
-            set_id = self.dut.droid.gattServerCharacteristicGetInstanceId(
-                characteristic)
-            if set_id != char['instance_id']:
-                self.log.error(
-                    "Instance ID did not match up. Found {} Expected {}".
-                    format(set_id, char['instance_id']))
-        if 'value_type' in char:
-            value_type = char['value_type']
-            value = char['value']
-            if value_type == gatt_characteristic_value_format['string']:
-                self.log.info("Set String value result: {}".format(
-                    self.dut.droid.gattServerCharacteristicSetStringValue(
-                        characteristic, value)))
-            elif value_type == gatt_characteristic_value_format['byte']:
-                self.log.info("Set Byte Array value result: {}".format(
-                    self.dut.droid.gattServerCharacteristicSetByteValue(
-                        characteristic, value)))
-            else:
-                self.log.info("Set Int value result: {}".format(
-                    self.dut.droid.gattServerCharacteristicSetIntValue(
-                        characteristic, value, value_type, char['offset'])))
-        return characteristic
-
-    def _setup_descriptor(self, desc):
-        descriptor = self.dut.droid.gattServerCreateBluetoothGattDescriptor(
-            desc['uuid'], desc['permissions'])
-        if 'value' in desc:
-            self.dut.droid.gattServerDescriptorSetByteValue(
-                descriptor, desc['value'])
-        if 'instance_id' in desc:
-            self.dut.droid.gattServerDescriptorSetInstanceId(
-                descriptor, desc['instance_id'])
-        self.descriptor_list.append(descriptor)
-        return descriptor
-
-    def setup_gatts_db(self, database):
-        """Setup GATT Server database"""
-        self.gatt_server_callback = \
-            self.dut.droid.gattServerCreateGattServerCallback()
-        self.gatt_server = self.dut.droid.gattServerOpenGattServer(
-            self.gatt_server_callback)
-        self.gatt_server_list.append(self.gatt_server)
-        for serv in database['services']:
-            service = self._setup_service(serv)
-            self.service_list.append(service)
-            if 'characteristics' in serv:
-                for char in serv['characteristics']:
-                    characteristic = self._setup_characteristic(char)
-                    if 'descriptors' in char:
-                        for desc in char['descriptors']:
-                            descriptor = self._setup_descriptor(desc)
-                            self.dut.droid.gattServerCharacteristicAddDescriptor(
-                                characteristic, descriptor)
-                    self.characteristic_list.append(characteristic)
-                    self.dut.droid.gattServerAddCharacteristicToService(
-                        service, characteristic)
-            self.dut.droid.gattServerAddService(self.gatt_server, service)
-            expected_event = gatt_cb_strings['serv_added'].format(
-                self.gatt_server_callback)
-            self.dut.ed.pop_event(expected_event, 10)
-        return self.gatt_server, self.gatt_server_callback
-
-    def send_continuous_response(self, user_input):
-        """Send the same response"""
-        desc_read = gatt_event['desc_read_req']['evt'].format(
-            self.gatt_server_callback)
-        desc_write = gatt_event['desc_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_read = gatt_event['char_read_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write = gatt_event['char_write']['evt'].format(
-            self.gatt_server_callback)
-        execute_write = gatt_event['exec_write']['evt'].format(
-            self.gatt_server_callback)
-        regex = "({}|{}|{}|{}|{})".format(desc_read, desc_write, char_read,
-                                          char_write, execute_write)
-        offset = 0
-        status = 0
-        mtu = 23
-        char_value = []
-        for i in range(512):
-            char_value.append(i % 256)
-        len_min = 470
-        end_time = time.time() + 180
-        i = 0
-        num_packets = ceil((len(char_value) + 1) / (mtu - 1))
-        while time.time() < end_time:
-            events = self.dut.ed.pop_events(regex, 10, small_timeout)
-            for event in events:
-                start_offset = i * (mtu - 1)
-                i += 1
-                self.log.debug("Found event: {}.".format(event))
-                request_id = event['data']['requestId']
-                data = char_value[start_offset:start_offset + mtu - 1]
-                if not data:
-                    data = [1]
-                self.log.debug(
-                    "GATT Server Send Response [request_id, status, offset, " \
-                    "data] [{}, {}, {}, {}]".format(request_id, status, offset,
-                        data))
-                self.dut.droid.gattServerSendResponse(self.gatt_server, 0,
-                                                      request_id, status,
-                                                      offset, data)
-
-    def send_continuous_response_data(self, user_input):
-        """Send the same response with data"""
-        desc_read = gatt_event['desc_read_req']['evt'].format(
-            self.gatt_server_callback)
-        desc_write = gatt_event['desc_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_read = gatt_event['char_read_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write = gatt_event['char_write']['evt'].format(
-            self.gatt_server_callback)
-        execute_write = gatt_event['exec_write']['evt'].format(
-            self.gatt_server_callback)
-        regex = "({}|{}|{}|{}|{})".format(desc_read, desc_write, char_read,
-                                          char_write, execute_write)
-        offset = 0
-        status = 0
-        mtu = 11
-        char_value = []
-        len_min = 470
-        end_time = time.time() + 180
-        i = 0
-        num_packets = ceil((len(char_value) + 1) / (mtu - 1))
-        while time.time() < end_time:
-            events = self.dut.ed.pop_events(regex, 10, small_timeout)
-            for event in events:
-                self.log.info(event)
-                request_id = event['data']['requestId']
-                if event['name'] == execute_write:
-                    if ('execute' in event['data']
-                            and event['data']['execute'] == True):
-                        for key in self.write_mapping:
-                            value = self.write_mapping[key]
-                            self.log.debug("Writing key, value: {}, {}".format(
-                                key, value))
-                            self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                                key, value)
-                        self.write_mapping = {}
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, 0, [1])
-                    continue
-                offset = event['data']['offset']
-                instance_id = event['data']['instanceId']
-                if (event['name'] == desc_write
-                        or event['name'] == char_write):
-                    if ('preparedWrite' in event['data']
-                            and event['data']['preparedWrite'] == True):
-                        value = event['data']['value']
-                        if instance_id in self.write_mapping:
-                            self.write_mapping[
-                                instance_id] = self.write_mapping[
-                                    instance_id] + value
-                        else:
-                            self.write_mapping[instance_id] = value
-                    else:
-                        self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                            event['data']['instanceId'],
-                            event['data']['value'])
-                try:
-                    data = self.dut.droid.gattServerGetReadValueByInstanceId(
-                        int(event['data']['instanceId']))
-                except Exception as err:
-                    self.log.error(err)
-                if not data:
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, offset, [1])
-                else:
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, offset,
-                        data[offset:offset + 17])
diff --git a/src/antlion/test_utils/bt/native_bt_test_utils.py b/src/antlion/test_utils/bt/native_bt_test_utils.py
deleted file mode 100644
index 822de8c..0000000
--- a/src/antlion/test_utils/bt/native_bt_test_utils.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from subprocess import call
-import time
-
-log = logging
-
-
-def setup_native_bluetooth(native_devices):
-    for n in native_devices:
-        droid = n.droid
-        pid = n.adb.shell("pidof -s bluetoothtbd")
-        if not pid:
-            call(
-                ["adb -s " + n.serial + " shell sh -c \"bluetoothtbd\" &"],
-                shell=True)
-        droid.BtBinderInitInterface()
-        time.sleep(5)  #temporary sleep statement
-        droid.BtBinderEnable()
-        time.sleep(5)  #temporary sleep statement
-        droid.BtBinderRegisterBLE()
-        time.sleep(5)  #temporary sleep statement
diff --git a/src/antlion/test_utils/bt/protos/__init__.py b/src/antlion/test_utils/bt/protos/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/bt/protos/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/bt/protos/bluetooth.proto b/src/antlion/test_utils/bt/protos/bluetooth.proto
deleted file mode 100644
index 969dbd6..0000000
--- a/src/antlion/test_utils/bt/protos/bluetooth.proto
+++ /dev/null
@@ -1,301 +0,0 @@
- /*
-  * Copyright 2022 The Fuchsia Authors
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-syntax = "proto2";
-
-option optimize_for = LITE_RUNTIME;
-
-// C++ namespace: bluetooth::metrics::BluetoothMetricsProto
-package bluetooth.metrics.BluetoothMetricsProto;
-
-option java_package = "com.android.bluetooth";
-option java_outer_classname = "BluetoothMetricsProto";
-
-message BluetoothLog {
-  // Session information that gets logged for every BT connection.
-  repeated BluetoothSession session = 1;
-
-  // Session information that gets logged for every Pair event.
-  repeated PairEvent pair_event = 2;
-
-  // Information for Wake locks.
-  repeated WakeEvent wake_event = 3;
-
-  // Scan event information.
-  repeated ScanEvent scan_event = 4;
-
-  // Number of bonded devices.
-  optional int32 num_bonded_devices = 5;
-
-  // Number of BluetoothSession including discarded ones beyond capacity
-  optional int64 num_bluetooth_session = 6;
-
-  // Number of PairEvent including discarded ones beyond capacity
-  optional int64 num_pair_event = 7;
-
-  // Number of WakeEvent including discarded ones beyond capacity
-  optional int64 num_wake_event = 8;
-
-  // Number of ScanEvent including discarded ones beyond capacity
-  optional int64 num_scan_event = 9;
-
-  // Statistics about Bluetooth profile connections
-  repeated ProfileConnectionStats profile_connection_stats = 10;
-
-  // Statistics about Headset profile connections
-  repeated HeadsetProfileConnectionStats headset_profile_connection_stats = 11;
-}
-
-// The information about the device.
-message DeviceInfo {
-  // Device type.
-  enum DeviceType {
-    // Type is unknown.
-    DEVICE_TYPE_UNKNOWN = 0;
-
-    DEVICE_TYPE_BREDR = 1;
-
-    DEVICE_TYPE_LE = 2;
-
-    DEVICE_TYPE_DUMO = 3;
-  }
-
-  // Device class
-  // https://cs.corp.google.com/#android/system/bt/stack/include/btm_api.h&q=major_computer.
-  optional int32 device_class = 1;
-
-  // Device type.
-  optional DeviceType device_type = 2;
-}
-
-// Information that gets logged for every Bluetooth connection.
-message BluetoothSession {
-  // Type of technology used in the connection.
-  enum ConnectionTechnologyType {
-    CONNECTION_TECHNOLOGY_TYPE_UNKNOWN = 0;
-
-    CONNECTION_TECHNOLOGY_TYPE_LE = 1;
-
-    CONNECTION_TECHNOLOGY_TYPE_BREDR = 2;
-  }
-
-  enum DisconnectReasonType {
-    UNKNOWN = 0;
-
-    // A metrics dump takes a snapshot of current Bluetooth session and thus
-    // is not a real disconnect, but a discontinuation in metrics logging.
-    // This enum indicates this situation.
-    METRICS_DUMP = 1;
-
-    NEXT_START_WITHOUT_END_PREVIOUS = 2;
-  }
-
-  // Duration of the session.
-  optional int64 session_duration_sec = 2;
-
-  // Technology type.
-  optional ConnectionTechnologyType connection_technology_type = 3;
-
-  // Reason for disconnecting.
-  optional string disconnect_reason = 4 [deprecated = true];
-
-  // The information about the device which it is connected to.
-  optional DeviceInfo device_connected_to = 5;
-
-  // The information about the RFComm session.
-  optional RFCommSession rfcomm_session = 6;
-
-  // The information about the A2DP audio session.
-  optional A2DPSession a2dp_session = 7;
-
-  // Numeric reason for disconnecting as defined in metrics.h
-  optional DisconnectReasonType disconnect_reason_type = 8;
-}
-
-message RFCommSession {
-  // bytes transmitted.
-  optional int32 rx_bytes = 1;
-
-  // bytes transmitted.
-  optional int32 tx_bytes = 2;
-}
-
-enum A2dpSourceCodec {
-  A2DP_SOURCE_CODEC_UNKNOWN = 0;
-  A2DP_SOURCE_CODEC_SBC = 1;
-  A2DP_SOURCE_CODEC_AAC = 2;
-  A2DP_SOURCE_CODEC_APTX = 3;
-  A2DP_SOURCE_CODEC_APTX_HD = 4;
-  A2DP_SOURCE_CODEC_LDAC = 5;
-}
-
-// Session information that gets logged for A2DP session.
-message A2DPSession {
-  // Media timer in milliseconds.
-  optional int32 media_timer_min_millis = 1;
-
-  // Media timer in milliseconds.
-  optional int32 media_timer_max_millis = 2;
-
-  // Media timer in milliseconds.
-  optional int32 media_timer_avg_millis = 3;
-
-  // Buffer overruns count.
-  optional int32 buffer_overruns_max_count = 4;
-
-  // Buffer overruns total.
-  optional int32 buffer_overruns_total = 5;
-
-  // Buffer underruns average.
-  optional float buffer_underruns_average = 6;
-
-  // Buffer underruns count.
-  optional int32 buffer_underruns_count = 7;
-
-  // Total audio time in this A2DP session
-  optional int64 audio_duration_millis = 8;
-
-  // Audio codec used in this A2DP session in A2DP source role
-  optional A2dpSourceCodec source_codec = 9;
-
-  // Whether A2DP offload is enabled in this A2DP session
-  optional bool is_a2dp_offload = 10;
-}
-
-message PairEvent {
-  // The reason for disconnecting
-  // See: system/bt/stack/include/hcidefs.h, HCI_ERR_CONN_FAILED_ESTABLISHMENT
-  optional int32 disconnect_reason = 1;
-
-  // Pair event time
-  optional int64 event_time_millis =
-      2;  // [(datapol.semantic_type) = ST_TIMESTAMP];
-
-  // The information about the device which it is paired to.
-  optional DeviceInfo device_paired_with = 3;
-}
-
-message WakeEvent {
-  // Information about the wake event type.
-  enum WakeEventType {
-    UNKNOWN = 0;
-    // WakeLock was acquired.
-    ACQUIRED = 1;
-    // WakeLock was released.
-    RELEASED = 2;
-  }
-
-  // Information about the wake event type.
-  optional WakeEventType wake_event_type = 1;
-
-  // Requestor of the wake event. Only the first three names will be stored.
-  // e.g. com.company.app
-  optional string requestor = 2;
-
-  // Name of the wakelock (e.g. bluedroid_timer).
-  optional string name = 3;
-
-  // Time of the event.
-  optional int64 event_time_millis =
-      4;  // [(datapol.semantic_type) = ST_TIMESTAMP];
-}
-
-message ScanEvent {
-  // Scan type.
-  enum ScanTechnologyType {
-    SCAN_TYPE_UNKNOWN = 0;
-
-    SCAN_TECH_TYPE_LE = 1;
-
-    SCAN_TECH_TYPE_BREDR = 2;
-
-    SCAN_TECH_TYPE_BOTH = 3;
-  }
-
-  // Scan event type.
-  enum ScanEventType {
-    // Scan started.
-    SCAN_EVENT_START = 0;
-    // Scan stopped.
-    SCAN_EVENT_STOP = 1;
-  }
-
-  // Scan event type.
-  optional ScanEventType scan_event_type = 1;
-
-  // Initiator of the scan. Only the first three names will be stored.
-  // e.g. com.company.app
-  optional string initiator = 2;
-
-  // Technology used for scanning.
-  optional ScanTechnologyType scan_technology_type = 3;
-
-  // Number of results returned.
-  optional int32 number_results = 4;
-
-  // Time of the event.
-  optional int64 event_time_millis =
-      5;  // [(datapol.semantic_type) = ST_TIMESTAMP];
-}
-
-// Profile IDs defined in BluetoothProfile API class
-// Values must match API class values
-enum ProfileId {
-  PROFILE_UNKNOWN = 0;
-  HEADSET = 1;
-  A2DP = 2;
-  HEALTH = 3;
-  HID_HOST = 4;
-  PAN = 5;
-  PBAP = 6;
-  GATT = 7;
-  GATT_SERVER = 8;
-  MAP = 9;
-  SAP = 10;
-  A2DP_SINK = 11;
-  AVRCP_CONTROLLER = 12;
-  AVRCP = 13;
-  HEADSET_CLIENT = 16;
-  PBAP_CLIENT = 17;
-  MAP_CLIENT = 18;
-  HID_DEVICE = 19;
-  OPP = 20;
-  HEARING_AID = 21;
-}
-
-// Statistics about Bluetooth profile connections
-message ProfileConnectionStats {
-  // Profile id defined in BluetoothProfile.java
-  optional ProfileId profile_id = 1;
-
-  // Number of times that this profile is connected since last metrics dump
-  optional int32 num_times_connected = 2;
-}
-
-enum HeadsetProfileType {
-  HEADSET_PROFILE_UNKNOWN = 0;
-  HSP = 1;
-  HFP = 2;
-}
-
-// Statistics about headset profile connections
-message HeadsetProfileConnectionStats {
-  // Type of headset profile connected
-  optional HeadsetProfileType headset_profile_type = 1;
-
-  // Number of times this type of headset profile is connected
-  optional int32 num_times_connected = 2;
-}
diff --git a/src/antlion/test_utils/bt/protos/bluetooth_pb2.py b/src/antlion/test_utils/bt/protos/bluetooth_pb2.py
deleted file mode 100644
index 1188f77..0000000
--- a/src/antlion/test_utils/bt/protos/bluetooth_pb2.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: bluetooth.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x62luetooth.proto\x12\'bluetooth.metrics.BluetoothMetricsProto\"\x8a\x05\n\x0c\x42luetoothLog\x12J\n\x07session\x18\x01 \x03(\x0b\x32\x39.bluetooth.metrics.BluetoothMetricsProto.BluetoothSession\x12\x46\n\npair_event\x18\x02 \x03(\x0b\x32\x32.bluetooth.metrics.BluetoothMetricsProto.PairEvent\x12\x46\n\nwake_event\x18\x03 \x03(\x0b\x32\x32.bluetooth.metrics.BluetoothMetricsProto.WakeEvent\x12\x46\n\nscan_event\x18\x04 \x03(\x0b\x32\x32.bluetooth.metrics.BluetoothMetricsProto.ScanEvent\x12\x1a\n\x12num_bonded_devices\x18\x05 \x01(\x05\x12\x1d\n\x15num_bluetooth_session\x18\x06 \x01(\x03\x12\x16\n\x0enum_pair_event\x18\x07 \x01(\x03\x12\x16\n\x0enum_wake_event\x18\x08 \x01(\x03\x12\x16\n\x0enum_scan_event\x18\t \x01(\x03\x12\x61\n\x18profile_connection_stats\x18\n \x03(\x0b\x32?.bluetooth.metrics.BluetoothMetricsProto.ProfileConnectionStats\x12p\n headset_profile_connection_stats\x18\x0b \x03(\x0b\x32\x46.bluetooth.metrics.BluetoothMetricsProto.HeadsetProfileConnectionStats\"\xdf\x01\n\nDeviceInfo\x12\x14\n\x0c\x64\x65vice_class\x18\x01 \x01(\x05\x12S\n\x0b\x64\x65vice_type\x18\x02 \x01(\x0e\x32>.bluetooth.metrics.BluetoothMetricsProto.DeviceInfo.DeviceType\"f\n\nDeviceType\x12\x17\n\x13\x44\x45VICE_TYPE_UNKNOWN\x10\x00\x12\x15\n\x11\x44\x45VICE_TYPE_BREDR\x10\x01\x12\x12\n\x0e\x44\x45VICE_TYPE_LE\x10\x02\x12\x14\n\x10\x44\x45VICE_TYPE_DUMO\x10\x03\"\x8f\x06\n\x10\x42luetoothSession\x12\x1c\n\x14session_duration_sec\x18\x02 \x01(\x03\x12v\n\x1a\x63onnection_technology_type\x18\x03 \x01(\x0e\x32R.bluetooth.metrics.BluetoothMetricsProto.BluetoothSession.ConnectionTechnologyType\x12\x1d\n\x11\x64isconnect_reason\x18\x04 \x01(\tB\x02\x18\x01\x12P\n\x13\x64\x65vice_connected_to\x18\x05 \x01(\x0b\x32\x33.bluetooth.metrics.BluetoothMetricsProto.DeviceInfo\x12N\n\x0erfcomm_session\x18\x06 \x01(\x0b\x32\x36.bluetooth.metrics.BluetoothMetricsProto.RFCommSession\x12J\n\x0c\x61\x32\x64p_session\x18\x07 \x01(\x0b\x32\x34.bluetooth.metrics.BluetoothMetricsProto.A2DPSession\x12n\n\x16\x64isconnect_reason_type\x18\x08 \x01(\x0e\x32N.bluetooth.metrics.BluetoothMetricsProto.BluetoothSession.DisconnectReasonType\"\x8b\x01\n\x18\x43onnectionTechnologyType\x12&\n\"CONNECTION_TECHNOLOGY_TYPE_UNKNOWN\x10\x00\x12!\n\x1d\x43ONNECTION_TECHNOLOGY_TYPE_LE\x10\x01\x12$\n CONNECTION_TECHNOLOGY_TYPE_BREDR\x10\x02\"Z\n\x14\x44isconnectReasonType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x10\n\x0cMETRICS_DUMP\x10\x01\x12#\n\x1fNEXT_START_WITHOUT_END_PREVIOUS\x10\x02\"3\n\rRFCommSession\x12\x10\n\x08rx_bytes\x18\x01 \x01(\x05\x12\x10\n\x08tx_bytes\x18\x02 \x01(\x05\"\xf9\x02\n\x0b\x41\x32\x44PSession\x12\x1e\n\x16media_timer_min_millis\x18\x01 \x01(\x05\x12\x1e\n\x16media_timer_max_millis\x18\x02 \x01(\x05\x12\x1e\n\x16media_timer_avg_millis\x18\x03 \x01(\x05\x12!\n\x19\x62uffer_overruns_max_count\x18\x04 \x01(\x05\x12\x1d\n\x15\x62uffer_overruns_total\x18\x05 \x01(\x05\x12 \n\x18\x62uffer_underruns_average\x18\x06 \x01(\x02\x12\x1e\n\x16\x62uffer_underruns_count\x18\x07 \x01(\x05\x12\x1d\n\x15\x61udio_duration_millis\x18\x08 \x01(\x03\x12N\n\x0csource_codec\x18\t \x01(\x0e\x32\x38.bluetooth.metrics.BluetoothMetricsProto.A2dpSourceCodec\x12\x17\n\x0fis_a2dp_offload\x18\n \x01(\x08\"\x92\x01\n\tPairEvent\x12\x19\n\x11\x64isconnect_reason\x18\x01 \x01(\x05\x12\x19\n\x11\x65vent_time_millis\x18\x02 \x01(\x03\x12O\n\x12\x64\x65vice_paired_with\x18\x03 \x01(\x0b\x32\x33.bluetooth.metrics.BluetoothMetricsProto.DeviceInfo\"\xdc\x01\n\tWakeEvent\x12Y\n\x0fwake_event_type\x18\x01 \x01(\x0e\x32@.bluetooth.metrics.BluetoothMetricsProto.WakeEvent.WakeEventType\x12\x11\n\trequestor\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x11\x65vent_time_millis\x18\x04 \x01(\x03\"8\n\rWakeEventType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x43QUIRED\x10\x01\x12\x0c\n\x08RELEASED\x10\x02\"\xc4\x03\n\tScanEvent\x12Y\n\x0fscan_event_type\x18\x01 \x01(\x0e\x32@.bluetooth.metrics.BluetoothMetricsProto.ScanEvent.ScanEventType\x12\x11\n\tinitiator\x18\x02 \x01(\t\x12\x63\n\x14scan_technology_type\x18\x03 \x01(\x0e\x32\x45.bluetooth.metrics.BluetoothMetricsProto.ScanEvent.ScanTechnologyType\x12\x16\n\x0enumber_results\x18\x04 \x01(\x05\x12\x19\n\x11\x65vent_time_millis\x18\x05 \x01(\x03\"u\n\x12ScanTechnologyType\x12\x15\n\x11SCAN_TYPE_UNKNOWN\x10\x00\x12\x15\n\x11SCAN_TECH_TYPE_LE\x10\x01\x12\x18\n\x14SCAN_TECH_TYPE_BREDR\x10\x02\x12\x17\n\x13SCAN_TECH_TYPE_BOTH\x10\x03\":\n\rScanEventType\x12\x14\n\x10SCAN_EVENT_START\x10\x00\x12\x13\n\x0fSCAN_EVENT_STOP\x10\x01\"}\n\x16ProfileConnectionStats\x12\x46\n\nprofile_id\x18\x01 \x01(\x0e\x32\x32.bluetooth.metrics.BluetoothMetricsProto.ProfileId\x12\x1b\n\x13num_times_connected\x18\x02 \x01(\x05\"\x97\x01\n\x1dHeadsetProfileConnectionStats\x12Y\n\x14headset_profile_type\x18\x01 \x01(\x0e\x32;.bluetooth.metrics.BluetoothMetricsProto.HeadsetProfileType\x12\x1b\n\x13num_times_connected\x18\x02 \x01(\x05*\xbd\x01\n\x0f\x41\x32\x64pSourceCodec\x12\x1d\n\x19\x41\x32\x44P_SOURCE_CODEC_UNKNOWN\x10\x00\x12\x19\n\x15\x41\x32\x44P_SOURCE_CODEC_SBC\x10\x01\x12\x19\n\x15\x41\x32\x44P_SOURCE_CODEC_AAC\x10\x02\x12\x1a\n\x16\x41\x32\x44P_SOURCE_CODEC_APTX\x10\x03\x12\x1d\n\x19\x41\x32\x44P_SOURCE_CODEC_APTX_HD\x10\x04\x12\x1a\n\x16\x41\x32\x44P_SOURCE_CODEC_LDAC\x10\x05*\xa0\x02\n\tProfileId\x12\x13\n\x0fPROFILE_UNKNOWN\x10\x00\x12\x0b\n\x07HEADSET\x10\x01\x12\x08\n\x04\x41\x32\x44P\x10\x02\x12\n\n\x06HEALTH\x10\x03\x12\x0c\n\x08HID_HOST\x10\x04\x12\x07\n\x03PAN\x10\x05\x12\x08\n\x04PBAP\x10\x06\x12\x08\n\x04GATT\x10\x07\x12\x0f\n\x0bGATT_SERVER\x10\x08\x12\x07\n\x03MAP\x10\t\x12\x07\n\x03SAP\x10\n\x12\r\n\tA2DP_SINK\x10\x0b\x12\x14\n\x10\x41VRCP_CONTROLLER\x10\x0c\x12\t\n\x05\x41VRCP\x10\r\x12\x12\n\x0eHEADSET_CLIENT\x10\x10\x12\x0f\n\x0bPBAP_CLIENT\x10\x11\x12\x0e\n\nMAP_CLIENT\x10\x12\x12\x0e\n\nHID_DEVICE\x10\x13\x12\x07\n\x03OPP\x10\x14\x12\x0f\n\x0bHEARING_AID\x10\x15*C\n\x12HeadsetProfileType\x12\x1b\n\x17HEADSET_PROFILE_UNKNOWN\x10\x00\x12\x07\n\x03HSP\x10\x01\x12\x07\n\x03HFP\x10\x02\x42\x30\n\x15\x63om.android.bluetoothB\x15\x42luetoothMetricsProtoH\x03')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'bluetooth_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\025com.android.bluetoothB\025BluetoothMetricsProtoH\003'
-  _BLUETOOTHSESSION.fields_by_name['disconnect_reason']._options = None
-  _BLUETOOTHSESSION.fields_by_name['disconnect_reason']._serialized_options = b'\030\001'
-  _A2DPSOURCECODEC._serialized_start=3267
-  _A2DPSOURCECODEC._serialized_end=3456
-  _PROFILEID._serialized_start=3459
-  _PROFILEID._serialized_end=3747
-  _HEADSETPROFILETYPE._serialized_start=3749
-  _HEADSETPROFILETYPE._serialized_end=3816
-  _BLUETOOTHLOG._serialized_start=61
-  _BLUETOOTHLOG._serialized_end=711
-  _DEVICEINFO._serialized_start=714
-  _DEVICEINFO._serialized_end=937
-  _DEVICEINFO_DEVICETYPE._serialized_start=835
-  _DEVICEINFO_DEVICETYPE._serialized_end=937
-  _BLUETOOTHSESSION._serialized_start=940
-  _BLUETOOTHSESSION._serialized_end=1723
-  _BLUETOOTHSESSION_CONNECTIONTECHNOLOGYTYPE._serialized_start=1492
-  _BLUETOOTHSESSION_CONNECTIONTECHNOLOGYTYPE._serialized_end=1631
-  _BLUETOOTHSESSION_DISCONNECTREASONTYPE._serialized_start=1633
-  _BLUETOOTHSESSION_DISCONNECTREASONTYPE._serialized_end=1723
-  _RFCOMMSESSION._serialized_start=1725
-  _RFCOMMSESSION._serialized_end=1776
-  _A2DPSESSION._serialized_start=1779
-  _A2DPSESSION._serialized_end=2156
-  _PAIREVENT._serialized_start=2159
-  _PAIREVENT._serialized_end=2305
-  _WAKEEVENT._serialized_start=2308
-  _WAKEEVENT._serialized_end=2528
-  _WAKEEVENT_WAKEEVENTTYPE._serialized_start=2472
-  _WAKEEVENT_WAKEEVENTTYPE._serialized_end=2528
-  _SCANEVENT._serialized_start=2531
-  _SCANEVENT._serialized_end=2983
-  _SCANEVENT_SCANTECHNOLOGYTYPE._serialized_start=2806
-  _SCANEVENT_SCANTECHNOLOGYTYPE._serialized_end=2923
-  _SCANEVENT_SCANEVENTTYPE._serialized_start=2925
-  _SCANEVENT_SCANEVENTTYPE._serialized_end=2983
-  _PROFILECONNECTIONSTATS._serialized_start=2985
-  _PROFILECONNECTIONSTATS._serialized_end=3110
-  _HEADSETPROFILECONNECTIONSTATS._serialized_start=3113
-  _HEADSETPROFILECONNECTIONSTATS._serialized_end=3264
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/test_utils/bt/pts/fuchsia_pts_ics_lib.py b/src/antlion/test_utils/bt/pts/fuchsia_pts_ics_lib.py
deleted file mode 100644
index 8c4d3e9..0000000
--- a/src/antlion/test_utils/bt/pts/fuchsia_pts_ics_lib.py
+++ /dev/null
@@ -1,365 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This is a placeholder for all ICS values in PTS
-    that matter to Fuchsia devices.
-"""
-
-# A2DP Values are just a placeholder.
-A2DP_ICS = {
-    b'TSPC_ALL': b'FALSE',
-    b'TSPC_A2DP_0_1': b'FALSE',
-    b'TSPC_A2DP_0_2': b'FALSE',
-    b'TSPC_A2DP_0_3': b'FALSE',
-    b'TSPC_A2DP_1_1': b'TRUE',
-    b'TSPC_A2DP_1_2': b'TRUE',
-    b'TSPC_A2DP_2_1': b'TRUE',
-    b'TSPC_A2DP_2a_1': b'FALSE',
-    b'TSPC_A2DP_2a_2': b'TRUE',
-    b'TSPC_A2DP_2a_3': b'FALSE',
-    b'TSPC_A2DP_2b_1': b'FALSE',
-    b'TSPC_A2DP_2b_2': b'FALSE',
-    b'TSPC_A2DP_2_2': b'TRUE',
-    b'TSPC_A2DP_2_3': b'TRUE',
-    b'TSPC_A2DP_2_4': b'TRUE',
-    b'TSPC_A2DP_2_5': b'TRUE',
-    b'TSPC_A2DP_2_6': b'TRUE',
-    b'TSPC_A2DP_2_7': b'TRUE',
-    b'TSPC_A2DP_2_8': b'FALSE',
-    b'TSPC_A2DP_2_9': b'FALSE',
-    b'TSPC_A2DP_2_10': b'TRUE',
-    b'TSPC_A2DP_2_10a': b'FALSE',
-    b'TSPC_A2DP_2_11': b'FALSE',
-    b'TSPC_A2DP_2_12': b'FALSE',
-    b'TSPC_A2DP_2_13': b'TRUE',
-    b'TSPC_A2DP_2_14': b'TRUE',
-    b'TSPC_A2DP_2_15': b'FALSE',
-    b'TSPC_A2DP_2_16': b'FALSE',
-    b'TSPC_A2DP_2_17': b'FALSE',
-    b'TSPC_A2DP_3_1': b'TRUE',
-    b'TSPC_A2DP_3_1a': b'FALSE',
-    b'TSPC_A2DP_3_2': b'TRUE',
-    b'TSPC_A2DP_3_3': b'FALSE',
-    b'TSPC_A2DP_3_4': b'FALSE',
-    b'TSPC_A2DP_3_5': b'TRUE',
-    b'TSPC_A2DP_3_6': b'FALSE',
-    b'TSPC_A2DP_3_7': b'FALSE',
-    b'TSPC_A2DP_3_8': b'FALSE',
-    b'TSPC_A2DP_3a_1': b'TRUE',
-    b'TSPC_A2DP_3a_2': b'FALSE',
-    b'TSPC_A2DP_3a_3': b'TRUE',
-    b'TSPC_A2DP_3a_4': b'TRUE',
-    b'TSPC_A2DP_3a_5': b'TRUE',
-    b'TSPC_A2DP_3a_6': b'TRUE',
-    b'TSPC_A2DP_3a_7': b'TRUE',
-    b'TSPC_A2DP_3a_8': b'TRUE',
-    b'TSPC_A2DP_3a_9': b'FALSE',
-    b'TSPC_A2DP_3a_10': b'TRUE',
-    b'TSPC_A2DP_3a_11': b'FALSE',
-    b'TSPC_A2DP_3a_12': b'TRUE',
-    b'TSPC_A2DP_4_1': b'TRUE',
-    b'TSPC_A2DP_4_2': b'TRUE',
-    b'TSPC_A2DP_4_3': b'FALSE',
-    b'TSPC_A2DP_4_4': b'TRUE',
-    b'TSPC_A2DP_4_5': b'TRUE',
-    b'TSPC_A2DP_4_6': b'FALSE',
-    b'TSPC_A2DP_4_7': b'TRUE',
-    b'TSPC_A2DP_4_8': b'FALSE',
-    b'TSPC_A2DP_4_9': b'TRUE',
-    b'TSPC_A2DP_4_10': b'TRUE',
-    b'TSPC_A2DP_4_10a': b'FALSE',
-    b'TSPC_A2DP_4_11': b'FALSE',
-    b'TSPC_A2DP_4_12': b'FALSE',
-    b'TSPC_A2DP_4_13': b'TRUE',
-    b'TSPC_A2DP_4_14': b'TRUE',
-    b'TSPC_A2DP_4_15': b'FALSE',
-    b'TSPC_A2DP_5_1': b'TRUE',
-    b'TSPC_A2DP_5_1a': b'TRUE',
-    b'TSPC_A2DP_5_2': b'TRUE',
-    b'TSPC_A2DP_5_3': b'FALSE',
-    b'TSPC_A2DP_5_4': b'FALSE',
-    b'TSPC_A2DP_5_5': b'FALSE',
-    b'TSPC_A2DP_5a_1': b'TRUE',
-    b'TSPC_A2DP_5a_2': b'TRUE',
-    b'TSPC_A2DP_5a_3': b'TRUE',
-    b'TSPC_A2DP_5a_4': b'TRUE',
-    b'TSPC_A2DP_5a_5': b'TRUE',
-    b'TSPC_A2DP_5a_6': b'TRUE',
-    b'TSPC_A2DP_5a_7': b'TRUE',
-    b'TSPC_A2DP_5a_8': b'TRUE',
-    b'TSPC_A2DP_5a_9': b'TRUE',
-    b'TSPC_A2DP_5a_10': b'TRUE',
-    b'TSPC_A2DP_5a_11': b'TRUE',
-    b'TSPC_A2DP_5a_12': b'TRUE',
-    b'TSPC_A2DP_7a_1': b'FALSE',
-    b'TSPC_A2DP_7a_2': b'FALSE',
-    b'TSPC_A2DP_7a_3': b'FALSE',
-    b'TSPC_A2DP_7b_1': b'FALSE',
-    b'TSPC_A2DP_7b_2': b'FALSE',
-
-    # Not available in Launch Studio Yet
-    b'TSPC_A2DP_10_1': b'FALSE',
-    b'TSPC_A2DP_10_2': b'FALSE',
-    b'TSPC_A2DP_10_3': b'FALSE',
-    b'TSPC_A2DP_10_4': b'FALSE',
-    b'TSPC_A2DP_10_5': b'FALSE',
-    b'TSPC_A2DP_10_6': b'FALSE',
-    b'TSPC_A2DP_11_1': b'FALSE',
-    b'TSPC_A2DP_11_2': b'FALSE',
-    b'TSPC_A2DP_11_3': b'FALSE',
-    b'TSPC_A2DP_11_4': b'FALSE',
-    b'TSPC_A2DP_11_5': b'FALSE',
-    b'TSPC_A2DP_11_6': b'FALSE',
-    b'TSPC_A2DP_12_2': b'FALSE',
-    b'TSPC_A2DP_12_3': b'FALSE',
-    b'TSPC_A2DP_12_3': b'FALSE',
-    b'TSPC_A2DP_12_4': b'FALSE',
-    b'TSPC_A2DP_13_1': b'FALSE',
-    b'TSPC_A2DP_13_2': b'FALSE',
-    b'TSPC_A2DP_13_3': b'FALSE',
-    b'TSPC_A2DP_13_4': b'FALSE',
-    b'TSPC_A2DP_14_1': b'FALSE',
-    b'TSPC_A2DP_14_2': b'FALSE',
-    b'TSPC_A2DP_14_3': b'FALSE',
-    b'TSPC_A2DP_14_4': b'FALSE',
-    b'TSPC_A2DP_14_5': b'FALSE',
-    b'TSPC_A2DP_15_1': b'FALSE',
-    b'TSPC_A2DP_15_2': b'FALSE',
-    b'TSPC_A2DP_15_3': b'FALSE',
-    b'TSPC_A2DP_15_4': b'FALSE',
-    b'TSPC_A2DP_15_5': b'FALSE',
-    b'TSPC_A2DP_15_6': b'FALSE',
-    b'TSPC_A2DP_3_2a': b'FALSE',
-    b'TSPC_A2DP_3_2b': b'FALSE',
-    b'TSPC_A2DP_3_2c': b'FALSE',
-    b'TSPC_A2DP_3_2d': b'FALSE',
-    b'TSPC_A2DP_3_2e': b'FALSE',
-    b'TSPC_A2DP_3_2f': b'FALSE',
-    b'TSPC_A2DP_5_2a': b'FALSE',
-    b'TSPC_A2DP_5_2b': b'FALSE',
-    b'TSPC_A2DP_5_2c': b'FALSE',
-    b'TSPC_A2DP_8_2': b'FALSE',
-    b'TSPC_A2DP_8_3': b'FALSE',
-    b'TSPC_A2DP_8_4': b'FALSE',
-    b'TSPC_A2DP_9_1': b'FALSE',
-    b'TSPC_A2DP_9_2': b'FALSE',
-    b'TSPC_A2DP_9_3': b'FALSE',
-    b'TSPC_A2DP_9_4': b'FALSE',
-
-}
-
-
-GATT_ICS = {
-    b'TSPC_GATT_1_1': b'TRUE',
-    b'TSPC_GATT_1_2': b'TRUE',
-    b'TSPC_GATT_1a_1': b'TRUE',
-    b'TSPC_GATT_1a_2': b'TRUE',
-    b'TSPC_GATT_1a_3': b'TRUE',
-    b'TSPC_GATT_1a_4': b'TRUE',
-    b'TSPC_GATT_1a_5': b'FALSE',
-    b'TSPC_GATT_1a_6': b'FALSE',
-    b'TSPC_GATT_1a_7': b'FALSE',
-    b'TSPC_GATT_1a_8': b'FALSE',
-    b'TSPC_GATT_2_1': b'FALSE',
-    b'TSPC_GATT_2_2': b'TRUE',
-    b'TSPC_GATT_3_1': b'TRUE',
-    b'TSPC_GATT_3_2': b'TRUE',
-    b'TSPC_GATT_3_3': b'TRUE',
-    b'TSPC_GATT_3_4': b'TRUE',
-    b'TSPC_GATT_3_5': b'TRUE',
-    b'TSPC_GATT_3_6': b'FALSE',
-    b'TSPC_GATT_3_7': b'TRUE',
-    b'TSPC_GATT_3_8': b'TRUE',
-    b'TSPC_GATT_3_9': b'TRUE',
-    b'TSPC_GATT_3_10': b'TRUE',
-    b'TSPC_GATT_3_11': b'FALSE',
-    b'TSPC_GATT_3_12': b'TRUE',
-    b'TSPC_GATT_3_13': b'FALSE',
-    b'TSPC_GATT_3_14': b'TRUE',
-    b'TSPC_GATT_3_15': b'TRUE',
-    b'TSPC_GATT_3_16': b'TRUE',
-    b'TSPC_GATT_3_17': b'TRUE',
-    b'TSPC_GATT_3_18': b'TRUE',
-    b'TSPC_GATT_3_19': b'TRUE',
-    b'TSPC_GATT_3_20': b'TRUE',
-    b'TSPC_GATT_3_21': b'TRUE',
-    b'TSPC_GATT_3_22': b'TRUE',
-    b'TSPC_GATT_3_23': b'TRUE',
-    b'TSPC_GATT_3_24': b'FALSE',
-    b'TSPC_GATT_3_25': b'FALSE',
-    b'TSPC_GATT_3_26': b'FALSE',
-    b'TSPC_GATT_3B_1': b'FALSE',
-    b'TSPC_GATT_3B_2': b'FALSE',
-    b'TSPC_GATT_3B_3': b'FALSE',
-    b'TSPC_GATT_3B_4': b'FALSE',
-    b'TSPC_GATT_3B_5': b'FALSE',
-    b'TSPC_GATT_3B_6': b'FALSE',
-    b'TSPC_GATT_3B_7': b'FALSE',
-    b'TSPC_GATT_3B_8': b'FALSE',
-    b'TSPC_GATT_3B_9': b'FALSE',
-    b'TSPC_GATT_3B_10': b'FALSE',
-    b'TSPC_GATT_3B_11': b'FALSE',
-    b'TSPC_GATT_3B_12': b'FALSE',
-    b'TSPC_GATT_3B_13': b'FALSE',
-    b'TSPC_GATT_3B_14': b'FALSE',
-    b'TSPC_GATT_3B_15': b'FALSE',
-    b'TSPC_GATT_3B_16': b'FALSE',
-    b'TSPC_GATT_3B_17': b'FALSE',
-    b'TSPC_GATT_3B_18': b'FALSE',
-    b'TSPC_GATT_3B_19': b'FALSE',
-    b'TSPC_GATT_3B_20': b'FALSE',
-    b'TSPC_GATT_3B_21': b'FALSE',
-    b'TSPC_GATT_3B_22': b'FALSE',
-    b'TSPC_GATT_3B_23': b'FALSE',
-    b'TSPC_GATT_3B_24': b'FALSE',
-    b'TSPC_GATT_3B_25': b'FALSE',
-    b'TSPC_GATT_3B_26': b'FALSE',
-    b'TSPC_GATT_3B_27': b'FALSE',
-    b'TSPC_GATT_3B_28': b'FALSE',
-    b'TSPC_GATT_3B_29': b'FALSE',
-    b'TSPC_GATT_3B_30': b'FALSE',
-    b'TSPC_GATT_3B_31': b'FALSE',
-    b'TSPC_GATT_3B_32': b'FALSE',
-    b'TSPC_GATT_3B_33': b'FALSE',
-    b'TSPC_GATT_3B_34': b'FALSE',
-    b'TSPC_GATT_3B_35': b'FALSE',
-    b'TSPC_GATT_3B_36': b'FALSE',
-    b'TSPC_GATT_3B_37': b'FALSE',
-    b'TSPC_GATT_3B_38': b'FALSE',
-    b'TSPC_GATT_4_1': b'TRUE',
-    b'TSPC_GATT_4_2': b'TRUE',
-    b'TSPC_GATT_4_3': b'TRUE',
-    b'TSPC_GATT_4_4': b'TRUE',
-    b'TSPC_GATT_4_5': b'TRUE',
-    b'TSPC_GATT_4_6': b'TRUE',
-    b'TSPC_GATT_4_7': b'TRUE',
-    b'TSPC_GATT_4_8': b'TRUE',
-    b'TSPC_GATT_4_9': b'TRUE',
-    b'TSPC_GATT_4_10': b'TRUE',
-    b'TSPC_GATT_4_11': b'FALSE',
-    b'TSPC_GATT_4_12': b'TRUE',
-    b'TSPC_GATT_4_13': b'FALSE',
-    b'TSPC_GATT_4_14': b'TRUE',
-    b'TSPC_GATT_4_15': b'TRUE',
-    b'TSPC_GATT_4_16': b'TRUE',
-    b'TSPC_GATT_4_17': b'TRUE',
-    b'TSPC_GATT_4_18': b'TRUE',
-    b'TSPC_GATT_4_19': b'TRUE',
-    b'TSPC_GATT_4_20': b'TRUE',
-    b'TSPC_GATT_4_21': b'TRUE',
-    b'TSPC_GATT_4_22': b'TRUE',
-    b'TSPC_GATT_4_23': b'TRUE',
-    b'TSPC_GATT_4_24': b'FALSE',
-    b'TSPC_GATT_4_25': b'FALSE',
-    b'TSPC_GATT_4_26': b'FALSE',
-    b'TSPC_GATT_4_27': b'FALSE',
-    b'TSPC_GATT_4B_1': b'FALSE',
-    b'TSPC_GATT_4B_2': b'FALSE',
-    b'TSPC_GATT_4B_3': b'FALSE',
-    b'TSPC_GATT_4B_4': b'FALSE',
-    b'TSPC_GATT_4B_5': b'FALSE',
-    b'TSPC_GATT_4B_6': b'FALSE',
-    b'TSPC_GATT_4B_7': b'FALSE',
-    b'TSPC_GATT_4B_8': b'FALSE',
-    b'TSPC_GATT_4B_9': b'FALSE',
-    b'TSPC_GATT_4B_10': b'FALSE',
-    b'TSPC_GATT_4B_11': b'FALSE',
-    b'TSPC_GATT_4B_12': b'FALSE',
-    b'TSPC_GATT_4B_13': b'FALSE',
-    b'TSPC_GATT_4B_14': b'FALSE',
-    b'TSPC_GATT_4B_15': b'FALSE',
-    b'TSPC_GATT_4B_16': b'FALSE',
-    b'TSPC_GATT_4B_17': b'FALSE',
-    b'TSPC_GATT_4B_18': b'FALSE',
-    b'TSPC_GATT_4B_19': b'FALSE',
-    b'TSPC_GATT_4B_20': b'FALSE',
-    b'TSPC_GATT_4B_21': b'FALSE',
-    b'TSPC_GATT_4B_22': b'FALSE',
-    b'TSPC_GATT_4B_23': b'FALSE',
-    b'TSPC_GATT_4B_24': b'FALSE',
-    b'TSPC_GATT_4B_25': b'FALSE',
-    b'TSPC_GATT_4B_26': b'FALSE',
-    b'TSPC_GATT_4B_27': b'FALSE',
-    b'TSPC_GATT_4B_28': b'FALSE',
-    b'TSPC_GATT_4B_29': b'FALSE',
-    b'TSPC_GATT_4B_30': b'FALSE',
-    b'TSPC_GATT_4B_31': b'FALSE',
-    b'TSPC_GATT_4B_32': b'FALSE',
-    b'TSPC_GATT_4B_33': b'FALSE',
-    b'TSPC_GATT_4B_34': b'FALSE',
-    b'TSPC_GATT_4B_35': b'FALSE',
-    b'TSPC_GATT_4B_36': b'FALSE',
-    b'TSPC_GATT_4B_37': b'FALSE',
-    b'TSPC_GATT_4B_38': b'FALSE',
-    b'TSPC_GATT_6_2': b'TRUE',
-    b'TSPC_GATT_6_3': b'TRUE',
-    b'TSPC_GATT_7_1': b'TRUE',
-    b'TSPC_GATT_7_2': b'TRUE',
-    b'TSPC_GATT_7_3': b'TRUE',
-    b'TSPC_GATT_7_4': b'TRUE',
-    b'TSPC_GATT_7_5': b'FALSE',
-    b'TSPC_GATT_7_6': b'FALSE',
-    b'TSPC_GATT_7_7': b'FALSE',
-    b'TSPC_GATT_8_1': b'TRUE',
-    b'TSPC_GAP_0_2': b'FALSE',
-    b'TSPC_GAP_24_2': b'TRUE',
-    b'TSPC_GAP_24_3': b'TRUE',
-    b'TSPC_GAP_34_2': b'TRUE',
-    b'TSPC_GAP_34_3': b'TRUE',
-    b'TSPC_ALL': b'FALSE',
-}
-
-
-SDP_ICS = {
-    b'TSPC_ALL': b'FALSE',
-    b'TSPC_SDP_1_1': b'TRUE',
-    b'TSPC_SDP_1_2': b'TRUE',
-    b'TSPC_SDP_1_3': b'TRUE',
-    b'TSPC_SDP_1b_1': b'TRUE',
-    b'TSPC_SDP_1b_2': b'TRUE',
-    b'TSPC_SDP_2_1': b'TRUE',
-    b'TSPC_SDP_2_2': b'TRUE',
-    b'TSPC_SDP_2_3': b'TRUE',
-    b'TSPC_SDP_3_1': b'TRUE',
-    b'TSPC_SDP_4_1': b'TRUE',
-    b'TSPC_SDP_4_2': b'TRUE',
-    b'TSPC_SDP_4_3': b'TRUE',
-    b'TSPC_SDP_5_1': b'TRUE',
-    b'TSPC_SDP_6_1': b'TRUE',
-    b'TSPC_SDP_6_2': b'TRUE',
-    b'TSPC_SDP_6_3': b'TRUE',
-    b'TSPC_SDP_7_1': b'TRUE',
-    b'TSPC_SDP_8_1': b'FALSE',
-    b'TSPC_SDP_8_2': b'FALSE',
-    b'TSPC_SDP_9_1': b'TRUE',
-    b'TSPC_SDP_9_2': b'TRUE',
-    b'TSPC_SDP_9_3': b'FALSE',
-    b'TSPC_SDP_9_4': b'FALSE',
-    b'TSPC_SDP_9_5': b'TRUE',
-    b'TSPC_SDP_9_6': b'TRUE',
-    b'TSPC_SDP_9_7': b'FALSE',
-    b'TSPC_SDP_9_8': b'FALSE',
-    b'TSPC_SDP_9_9': b'TRUE',
-    b'TSPC_SDP_9_10': b'TRUE',
-    b'TSPC_SDP_9_11': b'TRUE',
-    b'TSPC_SDP_9_12': b'FALSE',
-    b'TSPC_SDP_9_13': b'FALSE',
-    b'TSPC_SDP_9_14': b'TRUE',
-    b'TSPC_SDP_9_15': b'FALSE',
-    b'TSPC_SDP_9_16': b'FALSE',
-    b'TSPC_SDP_9_17': b'TRUE',
-    b'TSPC_SDP_9_18': b'TRUE',
-    b'TSPC_SDP_9_19': b'TRUE',
-}
diff --git a/src/antlion/test_utils/bt/pts/fuchsia_pts_ixit_lib.py b/src/antlion/test_utils/bt/pts/fuchsia_pts_ixit_lib.py
deleted file mode 100644
index c8fdf5c..0000000
--- a/src/antlion/test_utils/bt/pts/fuchsia_pts_ixit_lib.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This is a placeholder for all IXIT values in PTS
-    that matter to Fuchsia devices.
-"""
-
-A2DP_IXIT = {
-    b'TSPX_security_enabled': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_bd_addr_iut': (b'OCTETSTRING', b'000000000000'),
-    b'TSPX_SRC_class_of_device': (b'OCTETSTRING', b'080418'),
-    b'TSPX_SNK_class_of_device': (b'OCTETSTRING', b'04041C'),
-    b'TSPX_pin_code': (b'IA5STRING', b'0000'),
-    b'TSPX_delete_link_key': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_time_guard': (b'INTEGER', b'300000'),
-    b'TSPX_use_implicit_send': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_media_directory':
-    (b'IA5STRING', rb'C:\Program Files\Bluetooth SIG\Bluetooth PTS\bin\audio'),
-    b'TSPX_auth_password': (b'IA5STRING', b'0000'),
-    b'TSPX_auth_user_id': (b'IA5STRING', b'PTS'),
-    b'TSPX_rfcomm_channel': (b'INTEGER', b'8'),
-    b'TSPX_l2cap_psm': (b'OCTETSTRING', b'1011'),
-    b'TSPX_no_confirmations': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_cover_art_uuid': (b'OCTETSTRING', b'3EEE'),
-}
-
-GATT_IXIT = {
-    b'TSPX_bd_addr_iut': (b'OCTETSTRING', b'000000000000'),
-    b'TSPX_iut_device_name_in_adv_packet_for_random_address': (b'IA5STRING', b'tbd'),
-    b'TSPX_security_enabled': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_delete_link_key': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_time_guard': (b'INTEGER', b'180000'),
-    b'TSPX_selected_handle': (b'OCTETSTRING', b'0012'),
-    b'TSPX_use_implicit_send': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_secure_simple_pairing_pass_key_confirmation': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_iut_use_dynamic_bd_addr': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_iut_setup_att_over_br_edr': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_tester_database_file': (b'IA5STRING', rb'C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\GATT_Qualification_Test_Databases.xml'),
-    b'TSPX_iut_is_client_periphral': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_iut_is_server_central': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_mtu_size': (b'INTEGER', b'23'),
-    b'TSPX_pin_code': (b'IA5STRING', b'0000'),
-    b'TSPX_use_dynamic_pin': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_delete_ltk': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_tester_appearance': (b'OCTETSTRING', b'0000'),
-}
-
-SDP_IXIT = {
-    b'TSPX_sdp_service_search_pattern': (b'IA5STRING', b'0100'),
-    b'TSPX_sdp_service_search_pattern_no_results': (b'IA5STRING', b'EEEE'),
-    b'TSPX_sdp_service_search_pattern_additional_protocol_descriptor_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_bluetooth_profile_descriptor_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_browse_group_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_client_exe_url': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_documentation_url': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_icon_url': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_language_base_attribute_id_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_protocol_descriptor_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_provider_name': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_availability': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_data_base_state': (b'IA5STRING', b'1000'),
-    b'TSPX_sdp_service_search_pattern_service_description': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_id': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_info_time_to_live': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_version_number_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_name': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_record_state': (b'IA5STRING', b''),
-    b'TSPX_sdp_unsupported_attribute_id': (b'OCTETSTRING', b'EEEE'),
-    b'TSPX_security_enabled': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_delete_link_key': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_bd_addr_iut': (b'OCTETSTRING', b''),
-    b'TSPX_class_of_device_pts': (b'OCTETSTRING', b'200404'),
-    b'TSPX_class_of_device_test_pts_initiator': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_limited_inquiry_used': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_pin_code': (b'IA5STRING', b'0000'),
-    b'TSPX_time_guard': (b'INTEGER', b'200000'),
-    b'TSPX_device_search_time': (b'INTEGER', b'20'),
-    b'TSPX_use_implicit_send': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_secure_simple_pairing_pass_key_confirmation': (b'BOOLEAN', b'FALSE'),
-}
diff --git a/src/antlion/test_utils/bt/pts/pts_base_class.py b/src/antlion/test_utils/bt/pts/pts_base_class.py
deleted file mode 100644
index cee0389..0000000
--- a/src/antlion/test_utils/bt/pts/pts_base_class.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This is the PTS base class that is inherited from all PTS
-Tests.
-"""
-
-import re
-import time
-import traceback
-
-from ctypes import *
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.controllers.bluetooth_pts_device import VERDICT_STRINGS
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.signals import TestSignal
-from antlion.test_utils.abstract_devices.bluetooth_device import create_bluetooth_device
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-
-class PtsBaseClass(BaseTestClass):
-    """ Class for representing common functionality across all PTS tests.
-
-    This includes the ability to rerun tests due to PTS instability,
-    common PTS action mappings, and setup/teardown related devices.
-
-    """
-    scan_timeout_seconds = 10
-    peer_identifier = None
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_bluetooth_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_bluetooth_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an fuchsia device
-            self.dut = create_bluetooth_device(self.fuchsia_devices[0])
-
-        self.characteristic_read_not_permitted_uuid = self.user_params.get(
-            "characteristic_read_not_permitted_uuid")
-        self.characteristic_read_not_permitted_handle = self.user_params.get(
-            "characteristic_read_not_permitted_handle")
-        self.characteristic_read_invalid_handle = self.user_params.get(
-            "characteristic_read_invalid_handle")
-        self.characteristic_attribute_not_found_uuid = self.user_params.get(
-            "characteristic_attribute_not_found_uuid")
-        self.write_characteristic_not_permitted_handle = self.user_params.get(
-            "write_characteristic_not_permitted_handle")
-
-        self.pts = self.bluetooth_pts_device[0]
-        # MMI functions commented out until implemented. Added for tracking
-        # purposes.
-        self.pts_action_mapping = {
-            "A2DP": {
-                1: self.a2dp_mmi_iut_connectable,
-                1002: self.a2dp_mmi_iut_accept_connect,
-                1020: self.a2dp_mmi_initiate_open_stream,
-            },
-            "GATT": {
-                1: self.mmi_make_iut_connectable,
-                2: self.mmi_iut_initiate_connection,
-                3: self.mmi_iut_initiate_disconnection,
-                # 4: self.mmi_iut_no_security,
-                # 5: self.mmi_iut_initiate_br_connection,
-                10: self.mmi_discover_primary_service,
-                # 11: self.mmi_confirm_no_primary_service_small,
-                # 12: self.mmi_iut_mtu_exchange,
-                # 13: self.mmi_discover_all_service_record,
-                # 14: self.mmi_iut_discover_gatt_service_record,
-                15: self.mmi_iut_find_included_services,
-                # 16: self.mmi_confirm_no_characteristic_uuid_small,
-                17: self.mmi_confirm_primary_service,
-                # 18: self.mmi_send_primary_service_uuid,
-                # 19: self.mmi_confirm_primary_service_uuid,
-                # 22: self.confirm_primary_service_1801,
-                24: self.mmi_confirm_include_service,
-                26: self.mmi_confirm_characteristic_service,
-                # 27: self.perform_read_all_characteristics,
-                29: self.
-                mmi_discover_service_uuid_range,  # AKA: discover service by uuid
-                # 31: self.perform_read_all_descriptors,
-                48: self.mmi_iut_send_read_characteristic_handle,
-                58: self.mmi_iut_send_read_descriptor_handle,
-                70: self.mmi_send_write_command,
-                74: self.mmi_send_write_request,
-                76: self.mmi_send_prepare_write,
-                77: self.mmi_iut_send_prepare_write_greater_offset,
-                80: self.mmi_iut_send_prepare_write_greater,
-                110: self.mmi_iut_enter_handle_read_not_permitted,
-                111: self.mmi_iut_enter_uuid_read_not_permitted,
-                118: self.mmi_iut_enter_handle_invalid,
-                119: self.mmi_iut_enter_uuid_attribute_not_found,
-                120: self.mmi_iut_enter_handle_write_not_permitted,
-                2000: self.mmi_verify_secure_id,  # Enter pairing pin from DUT.
-            },
-            "SDP": {
-                # TODO: Implement MMIs as necessary
-            }
-        }
-        self.pts.bind_to(self.process_next_action)
-
-    def teardown_class(self):
-        self.pts.clean_up()
-
-    def setup_test(self):
-        # Always start the test with RESULT_INCOMP
-        self.pts.pts_test_result = VERDICT_STRINGS['RESULT_INCOMP']
-
-    def teardown_test(self):
-        return True
-
-    @staticmethod
-    def pts_test_wrap(fn):
-        def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "{}:{}:{}".format(self.__class__.__name__, fn.__name__,
-                                        time.time())
-            log_string = "[Test ID] {}".format(test_id)
-            self.log.info(log_string)
-            try:
-                self.dut.log_info("Started " + log_string)
-                result = fn(self, *args, **kwargs)
-                self.dut.log_info("Finished " + log_string)
-                rerun_count = self.user_params.get("pts_auto_rerun_count", 0)
-                for i in range(int(rerun_count)):
-                    if result is not True:
-                        self.teardown_test()
-                        log_string = "[Rerun Test ID] {}. Run #{} run failed... Retrying".format(
-                            test_id, i + 1)
-                        self.log.info(log_string)
-                        self.setup_test()
-                        self.dut.log_info("Rerun Started " + log_string)
-                        result = fn(self, *args, **kwargs)
-                    else:
-                        return result
-                return result
-            except TestSignal:
-                raise
-            except Exception as e:
-                self.log.error(traceback.format_exc())
-                self.log.error(str(e))
-                raise
-            return fn(self, *args, **kwargs)
-
-        return _safe_wrap_test_case
-
-    def process_next_action(self, action):
-        func = self.pts_action_mapping.get(
-            self.pts.pts_profile_mmi_request).get(action, "Nothing")
-        if func != 'Nothing':
-            func()
-
-    ### BEGIN A2DP MMI Actions ###
-
-    def a2dp_mmi_iut_connectable(self):
-        self.dut.start_profile_a2dp_sink()
-        self.dut.set_discoverable(True)
-
-    def a2dp_mmi_iut_accept_connect(self):
-        self.dut.start_profile_a2dp_sink()
-        self.dut.set_discoverable(True)
-
-    def a2dp_mmi_initiate_open_stream(self):
-        self.dut.a2dp_initiate_open_stream()
-
-    ### END A2DP MMI Actions ###
-
-    ### BEGIN GATT MMI Actions ###
-
-    def create_write_value_by_size(self, size):
-        write_value = []
-        for i in range(size):
-            write_value.append(i % 256)
-        return write_value
-
-    def mmi_send_write_command(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O with', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('with <= \'(.*)\' byte', description_to_parse)
-        size = int(raw_size.group(1))
-        self.dut.gatt_client_write_characteristic_without_response_by_handle(
-            self.peer_identifier, handle,
-            self.create_write_value_by_size(size))
-
-    def mmi_send_write_request(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O with', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('with <= \'(.*)\' byte', description_to_parse)
-        size = int(raw_size.group(1))
-        offset = 0
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_send_prepare_write(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O <=', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('<= \'(.*)\' byte', description_to_parse)
-        size = int(math.floor(int(raw_size.group(1)) / 2))
-        offset = int(size / 2)
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_iut_send_prepare_write_greater_offset(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O and', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_offset = re.search('greater than \'(.*)\' byte',
-                               description_to_parse)
-        offset = int(raw_offset.group(1))
-        size = 1
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_iut_send_prepare_write_greater(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O with', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('greater than \'(.*)\' byte',
-                             description_to_parse)
-        size = int(raw_size.group(1))
-        offset = 0
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_make_iut_connectable(self):
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-
-        self.dut.start_le_advertisement(adv_data, scan_response, interval,
-                                        connectable)
-
-    def mmi_iut_enter_uuid_read_not_permitted(self):
-        self.pts.extra_answers.append(
-            self.characteristic_read_not_permitted_uuid)
-
-    def mmi_iut_enter_handle_read_not_permitted(self):
-        self.pts.extra_answers.append(
-            self.characteristic_read_not_permitted_handle)
-
-    def mmi_iut_enter_handle_invalid(self):
-        self.pts.extra_answers.append(self.characteristic_read_invalid_handle)
-
-    def mmi_iut_enter_uuid_attribute_not_found(self):
-        self.pts.extra_answers.append(
-            self.characteristic_attribute_not_found_uuid)
-
-    def mmi_iut_enter_handle_write_not_permitted(self):
-        self.pts.extra_answers.append(
-            self.write_characteristic_not_permitted_handle)
-
-    def mmi_verify_secure_id(self):
-        self.pts.extra_answers.append(self.dut.get_pairing_pin())
-
-    def mmi_discover_service_uuid_range(self, uuid):
-        self.dut.gatt_client_mmi_discover_service_uuid_range(
-            self.peer_identifier, uuid)
-
-    def mmi_iut_initiate_connection(self):
-        autoconnect = False
-        transport = gatt_transport['le']
-        adv_name = "PTS"
-        self.peer_identifier = self.dut.le_scan_with_name_filter(
-            "PTS", self.scan_timeout_seconds)
-        if self.peer_identifier is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        tries = 3
-        for _ in range(tries):
-            if self.dut.gatt_connect(self.peer_identifier, transport,
-                                     autoconnect):
-                return
-
-        raise signals.TestFailure("Unable to connect to peripheral.")
-
-    def mmi_iut_initiate_disconnection(self):
-        if not self.dut.gatt_disconnect(self.peer_identifier):
-            raise signals.TestFailure("Failed to disconnect from peer.")
-
-    def mmi_discover_primary_service(self):
-        self.dut.gatt_refresh()
-
-    def mmi_iut_find_included_services(self):
-        self.dut.gatt_refresh()
-
-        test_result = self.pts.execute_test(test_name)
-        return test_result
-
-    def mmi_confirm_primary_service(self):
-        # TODO: Write verifier that 1800 and 1801 exists. For now just pass.
-        return True
-
-    def mmi_confirm_characteristic_service(self):
-        # TODO: Write verifier that no services exist. For now just pass.
-        return True
-
-    def mmi_confirm_include_service(self, uuid_description):
-        # TODO: Write verifier that input services exist. For now just pass.
-        # Note: List comes in the form of a long string to parse:
-        # Attribute Handle = '0002'O Included Service Attribute handle = '0080'O,End Group Handle = '0085'O,Service UUID = 'A00B'O
-        # \n
-        # Attribute Handle = '0021'O Included Service Attribute handle = '0001'O,End Group Handle = '0006'O,Service UUID = 'A00D'O
-        # \n ...
-        return True
-
-    def mmi_iut_send_read_characteristic_handle(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O to', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        self.dut.gatt_client_read_characteristic_by_handle(
-            self.peer_identifier, handle)
-
-    def mmi_iut_send_read_descriptor_handle(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O to', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        self.dut.gatt_client_descriptor_read_by_handle(self.peer_identifier,
-                                                       handle)
-
-    ### END GATT MMI Actions ###
diff --git a/src/antlion/test_utils/bt/rfcomm_lib.py b/src/antlion/test_utils/bt/rfcomm_lib.py
deleted file mode 100644
index 62d650c..0000000
--- a/src/antlion/test_utils/bt/rfcomm_lib.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Bluetooth adapter libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import bt_rfcomm_uuids
-from antlion.test_utils.bt.bt_test_utils import set_bt_scan_mode
-
-
-class RfcommLib():
-    def __init__(self, log, dut, target_mac_addr=None):
-        self.advertisement_list = []
-        self.dut = dut
-        self.log = log
-        self.target_mac_addr = target_mac_addr
-
-    def set_target_mac_addr(self, mac_addr):
-        self.target_mac_addr = mac_addr
-
-    def connect(self, line):
-        """Perform an RFCOMM connect"""
-        uuid = None
-        if len(line) > 0:
-            uuid = line
-        if uuid:
-            self.dut.droid.bluetoothRfcommBeginConnectThread(
-                self.target_mac_addr, uuid)
-        else:
-            self.dut.droid.bluetoothRfcommBeginConnectThread(
-                self.target_mac_addr)
-
-    def open_rfcomm_socket(self):
-        """Open rfcomm socket"""
-        self.dut.droid.rfcommCreateRfcommSocket(self.target_mac_addr, 1)
-
-    def open_l2cap_socket(self):
-        """Open L2CAP socket"""
-        self.dut.droid.rfcommCreateL2capSocket(self.target_mac_addr, 1)
-
-    def write(self, line):
-        """Write String data over an RFCOMM connection"""
-        self.dut.droid.bluetoothRfcommWrite(line)
-
-    def write_binary(self, line):
-        """Write String data over an RFCOMM connection"""
-        self.dut.droid.bluetoothRfcommWriteBinary(line)
-
-    def end_connect(self):
-        """End RFCOMM connection"""
-        self.dut.droid.bluetoothRfcommEndConnectThread()
-
-    def accept(self, line):
-        """Accept RFCOMM connection"""
-        uuid = None
-        if len(line) > 0:
-            uuid = line
-        if uuid:
-            self.dut.droid.bluetoothRfcommBeginAcceptThread(uuid)
-        else:
-            self.dut.droid.bluetoothRfcommBeginAcceptThread(
-                bt_rfcomm_uuids['base_uuid'])
-
-    def stop(self):
-        """Stop RFCOMM Connection"""
-        self.dut.droid.bluetoothRfcommStop()
-
-    def open_l2cap_socket(self):
-        """Open L2CAP socket"""
-        self.dut.droid.rfcommCreateL2capSocket(self.target_mac_addr, 1)
diff --git a/src/antlion/test_utils/bt/shell_commands_lib.py b/src/antlion/test_utils/bt/shell_commands_lib.py
deleted file mode 100644
index 0eafd73..0000000
--- a/src/antlion/test_utils/bt/shell_commands_lib.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Shell command library.
-"""
-
-
-class ShellCommands():
-    def __init__(self, log, dut):
-        self.dut = dut
-        self.log = log
-
-    def set_battery_level(self, level):
-        """Set the battery level via ADB shell
-        Args:
-            level: the percent level to set
-        """
-        self.dut.adb.shell("dumpsys battery set level {}".format(level))
-
-    def disable_ble_scanning(self):
-        """Disable BLE scanning via ADB shell"""
-        self.dut.adb.shell("settings put global ble_scan_always_enabled 0")
-
-    def enable_ble_scanning(self):
-        """Enable BLE scanning via ADB shell"""
-        self.dut.adb.shell("settings put global ble_scan_always_enabled 1")
-
-    def consume_cpu_core(self):
-        """Consume a CPU core on the Android device via ADB shell"""
-        self.dut.adb.shell("echo $$ > /dev/cpuset/top-app/tasks")
-        self.dut.adb.shell("cat /dev/urandom > /dev/null &")
diff --git a/src/antlion/test_utils/bt/simulated_carkit_device.py b/src/antlion/test_utils/bt/simulated_carkit_device.py
deleted file mode 100644
index 533184e..0000000
--- a/src/antlion/test_utils/bt/simulated_carkit_device.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-
-from antlion.controllers import android_device
-from antlion.test_utils.bt.bt_test_utils import bluetooth_enabled_check
-
-# TODO: This class to be deprecated for
-# ../acts/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
-
-
-class SimulatedCarkitDevice():
-    def __init__(self, serial):
-        self.ad = android_device.create(serial)[0]
-        if not bluetooth_enabled_check(self.ad):
-            asserts.fail("No able to turn on bluetooth")
-        self.mac_address = self.ad.droid.bluetoothGetLocalAddress()
-        self.ad.droid.bluetoothToggleState(False)
-        self.ad.droid.bluetoothMediaConnectToCarMBS()
-
-    def destroy(self):
-        self.ad.clean_up()
-
-    def accept_call(self):
-        return self.ad.droid.telecomAcceptRingingCall(None)
-
-    def end_call(self):
-        return self.ad.droid.telecomEndCall()
-
-    def enter_pairing_mode(self):
-        self.ad.droid.bluetoothStartPairingHelper(True)
-        return self.ad.droid.bluetoothMakeDiscoverable()
-
-    def next_track(self):
-        return self.ad.droid.bluetoothMediaPassthrough("skipNext")
-
-    def pause(self):
-        return self.ad.droid.bluetoothMediaPassthrough("pause")
-
-    def play(self):
-        return self.ad.droid.bluetoothMediaPassthrough("play")
-
-    def power_off(self):
-        return self.ad.droid.bluetoothToggleState(False)
-
-    def power_on(self):
-        return self.ad.droid.bluetoothToggleState(True)
-
-    def previous_track(self):
-        return self.ad.droid.bluetoothMediaPassthrough("skipPrev")
-
-    def reject_call(self):
-        return self.ad.droid.telecomCallDisconnect(
-            self.ad.droid.telecomCallGetCallIds()[0])
-
-    def volume_down(self):
-        target_step = self.ad.droid.getMediaVolume() - 1
-        target_step = max(target_step, 0)
-        return self.ad.droid.setMediaVolume(target_step)
-
-    def volume_up(self):
-        target_step = self.ad.droid.getMediaVolume() + 1
-        max_step = self.ad.droid.getMaxMediaVolume()
-        target_step = min(target_step, max_step)
-        return self.ad.droid.setMediaVolume(target_step)
diff --git a/src/antlion/test_utils/bt/__init__.py b/src/antlion/test_utils/dhcp/__init__.py
similarity index 100%
rename from src/antlion/test_utils/bt/__init__.py
rename to src/antlion/test_utils/dhcp/__init__.py
diff --git a/src/antlion/test_utils/dhcp/base_test.py b/src/antlion/test_utils/dhcp/base_test.py
new file mode 100644
index 0000000..6f68c3e
--- /dev/null
+++ b/src/antlion/test_utils/dhcp/base_test.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap, AccessPoint
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
+from antlion.test_utils.wifi import base_test
+
+from mobly import asserts
+
+
+class Dhcpv4InteropFixture(base_test.WifiBaseTest):
+    """Test helpers for validating DHCPv4 Interop
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def setup_class(self):
+        super().setup_class()
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
+            self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
+        self.access_point: AccessPoint = self.access_points[0]
+        self.access_point.stop_all_aps()
+
+    def setup_test(self):
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self):
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.access_point.stop_all_aps()
+
+    def connect(self, ap_params):
+        asserts.assert_true(
+            self.dut.associate(
+                ap_params["ssid"],
+                target_pwd=ap_params["password"],
+                target_security=ap_params["target_security"],
+            ),
+            "Failed to connect.",
+        )
+
+    def setup_ap(self):
+        """Generates a hostapd config and sets up the AP with that config.
+        Does not run a DHCP server.
+
+        Returns: A dictionary of information about the AP.
+        """
+        ssid = utils.rand_ascii_str(20)
+        security_mode = hostapd_constants.WPA2_STRING
+        security_profile = Security(
+            security_mode=security_mode,
+            password=generate_random_password(length=20),
+            wpa_cipher="CCMP",
+            wpa2_cipher="CCMP",
+        )
+        password = security_profile.password
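+        # Map the hostapd security mode to the DUT-side target security string.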
+        target_security = (
+            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                security_mode
+            )
+        )
+
+        ap_ids = setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=hostapd_constants.MODE_11N_MIXED,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            n_capabilities=[],
+            ac_capabilities=[],
+            force_wmm=True,
+            ssid=ssid,
+            security=security_profile,
+            password=password,
+        )
+
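+        # setup_ap() should yield exactly one identifier here since only a
+        # single SSID was configured above.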
+        if len(ap_ids) > 1:
+            raise Exception("Expected only one SSID on AP")
+
+        configured_subnets = self.access_point.get_configured_subnets()
+        if len(configured_subnets) > 1:
+            raise Exception("Expected only one subnet on AP")
+        router_ip = configured_subnets[0].router
+        network = configured_subnets[0].network
+
+        self.access_point.stop_dhcp()
+
+        return {
+            "ssid": ssid,
+            "password": password,
+            "target_security": target_security,
+            "ip": router_ip,
+            "network": network,
+            "id": ap_ids[0],
+        }
+
+    def device_can_ping(self, dest_ip):
+        """Checks if the DUT can ping the given address.
+
+        Returns: True if can ping, False otherwise"""
+        self.log.info("Attempting to ping %s..." % dest_ip)
+        ping_result = self.dut.can_ping(dest_ip, count=2)
+        if ping_result:
+            self.log.info("Success pinging: %s" % dest_ip)
+        else:
+            self.log.info("Failure pinging: %s" % dest_ip)
+        return ping_result
+
+    def get_device_ipv4_addr(self, interface=None, timeout=20):
+        """Checks if device has an ipv4 private address. Sleeps 1 second between
+        retries.
+
+        Args:
+            interface: string, name of interface from which to get ipv4 address.
+            timeout: seconds to wait for an address before giving up.
+
+        Raises:
+            ConnectionError: if the DUT does not have an ipv4 address after the
+            timeout expires.
+
+        Returns:
+            The device's IP address
+
+        """
+        self.log.debug("Fetching updated WLAN interface list")
+        if interface is None:
+            interface = self.dut.device.wlan_client_test_interface_name
+        self.log.info(
+            "Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s "
+            "seconds." % (interface, timeout)
+        )
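+        # Convert the timeout duration into an absolute deadline.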
+        timeout = time.time() + timeout
+        while time.time() < timeout:
+            ip_addrs = self.dut.device.get_interface_ip_addresses(interface)
+
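+            # A private ipv4 address indicates the DHCP exchange succeeded.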
+            if len(ip_addrs["ipv4_private"]) > 0:
+                ip = ip_addrs["ipv4_private"][0]
+                self.log.info("DUT has an ipv4 address: %s" % ip)
+                return ip
+            else:
+                self.log.debug(
+                    "DUT does not yet have an ipv4 address...retrying in 1 " "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("DUT failed to get an ipv4 address.")
+
+    def run_test_case_expect_dhcp_success(self, _test_name, settings):
+        """Starts the AP and DHCP server, and validates that the client
+        connects and obtains an address.
+
+        Args:
+            _test_name: name of the test being run; not used by this helper
+            settings: a dictionary containing:
+                dhcp_parameters: a dictionary of DHCP parameters
+                dhcp_options: a dictionary of DHCP options
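+
+        Example (illustrative values only; tests supply the dhcpd parameters
+        and options under test):
+            settings = {
+                "dhcp_parameters": {"max-lease-time": "60"},
+                "dhcp_options": {},
+            }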
+        """
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params["network"],
+            router=ap_params["ip"],
+            additional_parameters=settings["dhcp_parameters"],
+            additional_options=settings["dhcp_options"],
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+
+        self.log.debug("DHCP Configuration:\n" + dhcp_conf.render_config_file() + "\n")
+
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+        self.connect(ap_params=ap_params)
+
+        # Typical log lines look like:
+        # dhcpd[26695]: DHCPDISCOVER from f8:0f:f9:3d:ce:d1 via wlan1
+        # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+        # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from f8:0f:f9:3d:ce:d1 via wlan1
+        # dhcpd[26695]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+
+        try:
+            ip = self.get_device_ipv4_addr()
+        except ConnectionError:
+            # Dump the DHCP server logs to help diagnose why no lease was issued.
+            self.log.warning(self.access_point.get_dhcp_logs())
+            asserts.fail("DUT failed to get an IP address")
+
+        # Get updates to DHCP logs
+        dhcp_logs = self.access_point.get_dhcp_logs()
+
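+        # Exactly one DHCPDISCOVER and one DHCPOFFER are expected below, while
+        # DHCPREQUEST and DHCPACK only need to appear at least once.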
+        expected_string = f"DHCPDISCOVER from"
+        asserts.assert_equal(
+            dhcp_logs.count(expected_string),
+            1,
+            f'Incorrect count of DHCP Discovers ("{expected_string}") in logs:\n'
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPOFFER on {ip}"
+        asserts.assert_equal(
+            dhcp_logs.count(expected_string),
+            1,
+            f'Incorrect count of DHCP Offers ("{expected_string}") in logs:\n'
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPREQUEST for {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPACK on {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+        asserts.assert_true(
+            self.device_can_ping(ap_params["ip"]),
+            f'DUT failed to ping router at {ap_params["ip"]}',
+        )
diff --git a/src/antlion/test_utils/fuchsia/bt_test_utils.py b/src/antlion/test_utils/fuchsia/bt_test_utils.py
deleted file mode 100644
index 4706f2c..0000000
--- a/src/antlion/test_utils/fuchsia/bt_test_utils.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import re
-import subprocess
-import time
-
-PERSISTENT_BLUETOOTH_STORAGE_LOCATION = "/data/persistent/c1a6d0aebbf7c092c53e8e696636af8ec0629ff39b7f2e548430b0034d809da4/stash_secure.store"
-
-
-def le_scan_for_device_by_name(fd,
-                               log,
-                               search_name,
-                               timeout,
-                               partial_match=False,
-                               self_manage_scan=True):
-    """Scan for and returns the first BLE advertisement with the device name.
-
-    Args:
-        fd: The Fuchsia device to start LE scanning on.
-        log: The log var passed in from the test.
-        search_name: The name to find.
-        timeout: How long to scan for.
-        partial_match: Only do a partial match for the LE advertising name.
-          This will return the first result that had a partial match.
-        self_manage_scan: Whther or not this function should start/stop (True)
-          scans or if the caller should (False).
-
-    Returns:
-        The dictionary of device information.
-    """
-    if self_manage_scan:
-        scan_filter = {"name_substring": search_name}
-        fd.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-    end_time = time.time() + timeout
-    found_device = None
-    while time.time() < end_time and not found_device:
-        time.sleep(1)
-        scan_res = fd.sl4f.gattc_lib.bleGetDiscoveredDevices()['result']
-        for device in scan_res:
-            name, did, connectable = device["name"], device["id"], device[
-                "connectable"]
-            if name == search_name or (partial_match and search_name in name):
-                log.info("Successfully found advertisement! name, id: {}, {}".
-                         format(name, did))
-                found_device = device
-    if self_manage_scan:
-        fd.sl4f.gattc_lib.bleStopBleScan()
-    if not found_device:
-        log.error("Failed to find device with name {}.".format(search_name))
-    return found_device
-
-
-def bredr_scan_for_device_by_name(fd,
-                                  log,
-                                  search_name,
-                                  timeout,
-                                  partial_match=False):
-    """Discover for and returns the first Classic device that matches search_name.
-
-    Args:
-        fd: The Fuchsia device to start Classic discovery on.
-        log: The log var passed in from the test.
-        search_name: The name to find.
-        timeout: How long to scan for.
-        partial_match: Only do a partial match for the search_name.
-          This will return the first result that had a partial match.
-
-    Returns:
-        The dictionary of device information.
-    """
-    fd.sl4f.bts_lib.requestDiscovery(True)
-
-    end_time = time.time() + timeout
-    found_device = None
-    while time.time() < end_time and not found_device:
-        scan_res = fd.sl4f.bts_lib.getKnownRemoteDevices()['result']
-        for device in scan_res:
-            name, did = scan_res[device]["name"], scan_res[device]["id"]
-            if name == search_name or (partial_match and search_name in name):
-                log.info("Successfully found peer! name, id: {}, {}".format(
-                    name, did))
-                found_device = did
-        time.sleep(1)
-    fd.sl4f.bts_lib.requestDiscovery(False)
-    if not found_device:
-        log.error("Failed to find device with name {}.".format(search_name))
-        return found_device
-    return found_device
-
-
-def unbond_all_known_devices(fd, log):
-    """Unbond all known devices from input Fuchsia Device.
-
-    Args:
-        fd: The Fuchsia device to unbond devices from.
-        log: The log var passed in from the test.
-    """
-    fd.sl4f.bts_lib.requestDiscovery(True)
-    device_list = fd.sl4f.bts_lib.getKnownRemoteDevices()['result']
-    fd.sl4f.bts_lib.requestDiscovery(False)
-    for device in device_list:
-        d = device_list[device]
-        if d['bonded'] or d['connected']:
-            log.info("Unbonding device: {}".format(d))
-            log.info(fd.sl4f.bts_lib.forgetDevice(d['id'])['result'])
-
-
-def verify_device_state_by_name(fd, log, search_name, state, services=None):
-    """Verify a connection state change happened an input device.
-
-    Args:
-        fd: The Fuchsia device to unbond devices from.
-        log: The log var passed in from the test.
-        search_name: The device name to find.
-        state: The expected state.
-        services: An optional list of services to expect based on the connected
-            device.
-    """
-    fd.sl4f.bts_lib.requestDiscovery(True)
-
-    seconds_allowed_for_state_change = 10
-    end_time = time.time() + seconds_allowed_for_state_change
-    found_state = None
-    while time.time() < end_time and not found_state:
-        device_list = fd.sl4f.bts_lib.getKnownRemoteDevices()['result']
-        for device in device_list:
-            d = device_list[device]
-            name = d['name']
-            if name == search_name:
-                print(d)
-                if state == "CONNECTED" and d['connected']:
-                    log.info("Found connected device {}".format(d))
-                    found_state = True
-                    break
-                if state == "BONDED" and d['bonded']:
-                    log.info("Found bonded device {}".format(d))
-                    found_state = True
-                    break
-        time.sleep(1)
-    #TODO: Verify services.
-    fd.sl4f.bts_lib.requestDiscovery(False)
-    return found_state
-
-
-def decode_list_to_link_key(raw_list):
-    """ Decodes the input int list to a string link key
-    Args:
-        raw_list: The list of int values to convert
-    Returns:
-        A string represetnation of the link key
-    """
-    str_list = ""
-    raw_list.reverse()
-    for item in raw_list:
-        check = str(hex(int(item)))[2:]
-        if len(check) == 1:
-            check = "0{}".format(check)
-        str_list += check
-    return str_list
-
-
-def get_link_keys(fd, save_path):
-    """Get Bluetooth link keys and LTKs for input Fuchsia device.
-
-    Args:
-        fd: The Fuchsia device object.
-        save_path: The custom save path.
-    Returns:
-        Dictionary of known LTKs and link keys
-    """
-    subprocess.run([
-        f"scp -F {fd.ssh_config} -6 [{fd.ip}]:{PERSISTENT_BLUETOOTH_STORAGE_LOCATION} {save_path}"
-    ],
-                   shell=True)
-    stash_secure_output = ""
-    with open(save_path, 'rb') as file:
-        stash_secure_output = file.read()
-    non_ascii_bytes_removed = re.sub(rb'[^\x00-\x7f]', rb'',
-                                     stash_secure_output).decode('utf-8')
-
-    bonding_data_split = non_ascii_bytes_removed.split("bonding-data:")
-    bonding_data_split.pop(0)
-    data_dict = {}
-    for data in bonding_data_split:
-        if "saved_networks" in data:
-            data = data.split("saved_networks")[0]
-        trailing_data_removed = re.sub(r'^.*?{', '{', data).strip()
-
-        more_trailing_data = trailing_data_removed.rsplit('}', 1)[0] + "}"
-        # Sometimes 'ost-data' will be apended at the end.
-        even_more_trailing_info = more_trailing_data.split('ost-data')[0]
-
-        # Remove the special chars at the end of the string that start with x1b
-        clean_json = more_trailing_data.split('\x1b')[0]
-
-        json_conversion = json.loads(clean_json)
-        identifier = json_conversion.get("identifier")
-        device_name = json_conversion.get("name")
-
-        device_address = decode_list_to_link_key(
-            json_conversion.get("address").get("value"))
-        device_address = ':'.join([
-            device_address[i:i + 2] for i in range(0, len(device_address), 2)
-        ])
-
-        data_dict[identifier] = {
-            "device_name": device_name,
-            "device_address": device_address
-        }
-
-        if json_conversion.get("bredr") is not None:
-            link_key = decode_list_to_link_key(
-                json_conversion.get("bredr").get("linkKey").get("value"))
-            data_dict[identifier]["bredr_link_key"] = link_key
-
-        if json_conversion.get("le") is not None:
-            ltk_key = decode_list_to_link_key(
-                json_conversion.get("le").get("localLtk").get("key").get(
-                    "value"))
-            data_dict[identifier]["le_ltk"] = ltk_key
-
-    return data_dict
diff --git a/src/antlion/test_utils/fuchsia/sdp_records.py b/src/antlion/test_utils/fuchsia/sdp_records.py
deleted file mode 100644
index 84f442e..0000000
--- a/src/antlion/test_utils/fuchsia/sdp_records.py
+++ /dev/null
@@ -1,491 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.bt.bt_constants import bt_attribute_values
-from antlion.test_utils.bt.bt_constants import sig_uuid_constants
-
-BASE_UUID = sig_uuid_constants['BASE_UUID']
-
-# A list of pre-defined SDP definitions
-sdp_pts_record_list = [
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['AudioSink'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['AVDTP'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVDTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['AdvancedAudioDistribution'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors': [{
-            'protocol':
-            int(sig_uuid_constants['L2CAP'], 16),
-            'params': [
-                {
-                    'data': int(sig_uuid_constants['AVDTP'], 16),
-                },
-                {
-                    'data': int(sig_uuid_constants['AVCTP'], 16),
-                },
-                {
-                    'data': int(sig_uuid_constants['GenericAudio'], 16),
-                },
-            ]
-        }],
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_SERVICE_AVAILABILITY'],
-            'element': {
-                'data': 0xff  # Indicate all available
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [
-            BASE_UUID.format(sig_uuid_constants['A/V_RemoteControlTarget']),
-            BASE_UUID.format(sig_uuid_constants['A/V_RemoteControl']),
-            BASE_UUID.format(sig_uuid_constants['A/V_RemoteControlController'])
-        ],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['AVCTP'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['PANU'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['NAP'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['SerialPort'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['SerialPort'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['DialupNetworking'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['DialupNetworking'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['OBEXObjectPush'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['OBEXObjectPush'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['OBEXFileTransfer'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['OBEXFileTransfer'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['Headset'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['Headset'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['HandsfreeAudioGateway'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['HandsfreeAudioGateway'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['Handsfree'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['Handsfree'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['SIM_Access'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['SIM_Access'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    }
-]
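For context on the records removed above: each `service_class_uuids` entry expands a 16-bit Bluetooth SIG-assigned number into a full 128-bit UUID, and each protocol descriptor parses the same hex string into an integer. A minimal sketch of that expansion, assuming `BASE_UUID` follows the standard Bluetooth base-UUID pattern; the two-entry `sig_uuid_constants` below is an illustrative stand-in for the full table defined in the module:

# Sketch only: BASE_UUID pattern assumed to match the standard Bluetooth base UUID.
BASE_UUID = "0000{}-0000-1000-8000-00805f9b34fb"
# Illustrative subset of SIG-assigned numbers (hex strings, as in the records above).
sig_uuid_constants = {"Handsfree": "111E", "L2CAP": "0100"}

# 16-bit number "111E" -> 128-bit service class UUID string.
handsfree_uuid = BASE_UUID.format(sig_uuid_constants["Handsfree"])
# Hex string -> integer protocol identifier, as used in 'protocol_descriptors'.
l2cap_protocol = int(sig_uuid_constants["L2CAP"], 16)

print(handsfree_uuid)       # 0000111E-0000-1000-8000-00805f9b34fb
print(hex(l2cap_protocol))  # 0x100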
diff --git a/src/antlion/test_utils/fuchsia/utils.py b/src/antlion/test_utils/fuchsia/utils.py
index 01f7da2..89bbc64 100644
--- a/src/antlion/test_utils/fuchsia/utils.py
+++ b/src/antlion/test_utils/fuchsia/utils.py
@@ -15,18 +15,20 @@
 # limitations under the License.
 
 import os
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
+from antlion.controllers.fuchsia_lib.ssh import SSHError
 
 
-def http_file_download_by_curl(fd,
-                               url,
-                               out_path='/tmp/',
-                               curl_loc='/bin/curl',
-                               remove_file_after_check=True,
-                               timeout=3600,
-                               limit_rate=None,
-                               additional_args=None,
-                               retry=3):
+def http_file_download_by_curl(
+    fd,
+    url,
+    out_path="/tmp/",
+    curl_loc="/bin/curl",
+    remove_file_after_check=True,
+    timeout=3600,
+    limit_rate=None,
+    additional_args=None,
+    retry=3,
+):
     """Download http file by ssh curl.
 
     Args:
@@ -42,39 +44,38 @@
         additional_args: Any additional args for curl.
         retry: number of retry attempts passed to the curl command.
     """
-    file_directory, file_name = _generate_file_directory_and_file_name(
-        url, out_path)
+    file_directory, file_name = _generate_file_directory_and_file_name(url, out_path)
     file_path = os.path.join(file_directory, file_name)
     curl_cmd = curl_loc
     if limit_rate:
-        curl_cmd += f' --limit-rate {limit_rate}'
+        curl_cmd += f" --limit-rate {limit_rate}"
     if retry:
-        curl_cmd += f' --retry {retry}'
+        curl_cmd += f" --retry {retry}"
     if additional_args:
-        curl_cmd += f' {additional_args}'
-    curl_cmd += f' --url {url} > {file_path}'
+        curl_cmd += f" {additional_args}"
+    curl_cmd += f" --url {url} > {file_path}"
 
-    fd.log.info(f'Download {url} to {file_path} by ssh command {curl_cmd}')
+    fd.log.info(f"Download {url} to {file_path} by ssh command {curl_cmd}")
     try:
         fd.ssh.run(curl_cmd, timeout_sec=timeout)
         if _check_file_existence(fd, file_path):
-            fd.log.info(f'{url} is downloaded to {file_path} successfully')
+            fd.log.info(f"{url} is downloaded to {file_path} successfully")
             return True
 
-        fd.log.warning(f'Fail to download {url}')
+        fd.log.warning(f"Fail to download {url}")
         return False
-    except FuchsiaSSHError as e:
+    except SSHError as e:
         fd.log.warning(f'Command "{curl_cmd}" failed with error {e}')
         return False
     except Exception as e:
-        fd.log.error(f'Download {url} failed with unexpected exception {e}')
+        fd.log.error(f"Download {url} failed with unexpected exception {e}")
         return False
     finally:
         if remove_file_after_check:
-            fd.log.info(f'Remove the downloaded file {file_path}')
+            fd.log.info(f"Remove the downloaded file {file_path}")
             try:
-                fd.ssh.run(f'rm {file_path}')
-            except FuchsiaSSHError:
+                fd.ssh.run(f"rm {file_path}")
+            except SSHError:
                 pass
 
 
@@ -90,10 +91,10 @@
         file_directory: The directory of where to store the downloaded file.
         file_name: The name of the file that is being downloaded.
     """
-    file_name = url.split('/')[-1]
+    file_name = url.split("/")[-1]
     if not out_path:
-        file_directory = '/tmp/'
-    elif not out_path.endswith('/'):
+        file_directory = "/tmp/"
+    elif not out_path.endswith("/"):
         file_directory, file_name = os.path.split(out_path)
     else:
         file_directory = out_path
@@ -110,10 +111,10 @@
     """
     try:
         result = fd.ssh.run(f'ls -al "{file_path}"')
-        fd.log.debug(f'File {file_path} exists.')
+        fd.log.debug(f"File {file_path} exists.")
         return True
-    except FuchsiaSSHError as e:
-        if 'No such file or directory' in e.result.stderr:
-            fd.log.debug(f'File {file_path} does not exist.')
+    except SSHError as e:
+        if "No such file or directory" in e.result.stderr:
+            fd.log.debug(f"File {file_path} does not exist.")
             return False
         raise e
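A quick usage sketch for the reformatted download helper above (not part of this change). The `FakeFuchsiaDevice` stub is hypothetical: it only provides the `fd.log` and `fd.ssh.run` surface the helper touches, and it runs commands locally instead of over SSH, assuming antlion is importable:

import logging
import subprocess

from antlion.test_utils.fuchsia.utils import http_file_download_by_curl


class FakeSSH:
    """Hypothetical stand-in for the device SSH wrapper used by the helper."""

    def run(self, command, timeout_sec=None):
        # Execute locally for illustration; check=True surfaces failures as
        # exceptions, roughly mirroring how the real wrapper raises SSHError.
        return subprocess.run(
            command, shell=True, check=True, capture_output=True, text=True
        )


class FakeFuchsiaDevice:
    """Hypothetical device object exposing only the fd.log and fd.ssh attributes."""

    def __init__(self):
        self.log = logging.getLogger("fake-fuchsia-device")
        self.ssh = FakeSSH()


logging.basicConfig(level=logging.INFO)
fd = FakeFuchsiaDevice()
# Defaults: fetch with /bin/curl into /tmp/, then remove the file afterwards.
ok = http_file_download_by_curl(fd, "https://example.com/artifact.bin")
print(f"download succeeded: {ok}")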
diff --git a/src/antlion/test_utils/fuchsia/wmm_test_cases.py b/src/antlion/test_utils/fuchsia/wmm_test_cases.py
index d5aa3c5..48eb8ce 100644
--- a/src/antlion/test_utils/fuchsia/wmm_test_cases.py
+++ b/src/antlion/test_utils/fuchsia/wmm_test_cases.py
@@ -16,1224 +16,1311 @@
 
 # Internal Traffic Differentiation
 test_internal_traffic_diff_VO_VI = {
-    'phase_1': {
-        'stream_VO':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_VI':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VO')
-             ])
+    "phase_1": {
+        "stream_VO": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_VI": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=1.0,
+            validation=[dict(operator="<", phase="phase_1", stream="stream_VO")],
+        ),
     }
 }
 
 test_internal_traffic_diff_VO_BE = {
-    'phase_1': {
-        'stream_VO':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VO')
-             ])
+    "phase_1": {
+        "stream_VO": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BE": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=1.0,
+            validation=[dict(operator="<", phase="phase_1", stream="stream_VO")],
+        ),
     }
 }
 
 test_internal_traffic_diff_VO_BK = {
-    'phase_1': {
-        'stream_VO':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VO')
-             ])
+    "phase_1": {
+        "stream_VO": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[dict(operator="<", phase="phase_1", stream="stream_VO")],
+        ),
     }
 }
 
 test_internal_traffic_diff_VI_BE = {
-    'phase_1': {
-        'stream_VI':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VI')
-             ])
+    "phase_1": {
+        "stream_VI": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BE": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=1.0,
+            validation=[dict(operator="<", phase="phase_1", stream="stream_VI")],
+        ),
     }
 }
 
 test_internal_traffic_diff_VI_BK = {
-    'phase_1': {
-        'stream_VI':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VI')
-             ])
+    "phase_1": {
+        "stream_VI": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[dict(operator="<", phase="phase_1", stream="stream_VI")],
+        ),
     }
 }
 
 test_internal_traffic_diff_BE_BK = {
-    'phase_1': {
-        'stream_BE':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_BE')
-             ])
+    "phase_1": {
+        "stream_BE": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[dict(operator="<", phase="phase_1", stream="stream_BE")],
+        ),
     }
 }
 # External Traffic Differentiation
 
 # Single station, STAUT transmits high priority
 test_external_traffic_diff_staut_VO_ap_VI = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap')
-             ])
+    "phase_1": {
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VO_staut_to_ap")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_VO_ap_BE = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap')
-             ])
+    "phase_1": {
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VO_staut_to_ap")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_VO_ap_BK = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap')
-             ])
+    "phase_1": {
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VO_staut_to_ap")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_VI_ap_BE = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap')
-             ])
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VI_staut_to_ap")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_VI_ap_BK = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap')
-             ])
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VI_staut_to_ap")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_BE_ap_BK = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_BE_staut_to_ap')
-             ])
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_BE_staut_to_ap")
+            ],
+        ),
     }
 }
 
 # Single station, STAUT transmits low priority
 test_external_traffic_diff_staut_VI_ap_VO = {
-    'phase_1': {
-        'stream_VO_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_ap_to_staut')
-             ])
+    "phase_1": {
+        "stream_VO_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VO_ap_to_staut")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_BE_ap_VO = {
-    'phase_1': {
-        'stream_VO_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_ap_to_staut')
-             ])
+    "phase_1": {
+        "stream_VO_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VO_ap_to_staut")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_BK_ap_VO = {
-    'phase_1': {
-        'stream_VO_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_ap_to_staut')
-             ])
+    "phase_1": {
+        "stream_VO_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VO_ap_to_staut")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_BE_ap_VI = {
-    'phase_1': {
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut')
-             ])
+    "phase_1": {
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VI_ap_to_staut")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_BK_ap_VI = {
-    'phase_1': {
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut')
-             ])
+    "phase_1": {
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_VI_ap_to_staut")
+            ],
+        ),
     }
 }
 
 test_external_traffic_diff_staut_BK_ap_BE = {
-    'phase_1': {
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_BE_ap_to_staut')
-             ])
+    "phase_1": {
+        "stream_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03)
+            ],
+        ),
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=1.0,
+            validation=[
+                dict(operator="<", phase="phase_1", stream="stream_BE_ap_to_staut")
+            ],
+        ),
     }
 }
 
 # Dual Internal/External Traffic Differentiation
 
 test_dual_traffic_diff_staut_VO_VI_ap_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.5)
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.5,
+            validation=[
+                dict(
+                    operator="==",
+                    phase="phase_1",
+                    stream="stream_VI_ap_to_staut",
+                    max_bw_rel_tolerance=0.15,
+                )
+            ],
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.01)
+            ],
+        ),
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.5,
+        ),
     }
 }
 
 test_dual_traffic_diff_staut_VO_BE_ap_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BE_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5)
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.5,
+            validation=[
+                dict(
+                    operator="==",
+                    phase="phase_1",
+                    stream="stream_BE_ap_to_staut",
+                    max_bw_rel_tolerance=0.15,
+                )
+            ],
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01)
+            ],
+        ),
+        "stream_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.5,
+        ),
     }
 }
 
 test_dual_traffic_diff_staut_VO_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BK_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5)
+    "phase_1": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.5,
+            validation=[
+                dict(
+                    operator="==",
+                    phase="phase_1",
+                    stream="stream_BK_ap_to_staut",
+                    max_bw_rel_tolerance=0.15,
+                )
+            ],
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01)
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.5,
+        ),
     }
 }
 
 test_dual_traffic_diff_staut_VI_BE_ap_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BE_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5)
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.5,
+            validation=[
+                dict(
+                    operator="==",
+                    phase="phase_1",
+                    stream="stream_BE_ap_to_staut",
+                    max_bw_rel_tolerance=0.15,
+                )
+            ],
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01)
+            ],
+        ),
+        "stream_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.5,
+        ),
     }
 }
 
 test_dual_traffic_diff_staut_VI_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BK_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5)
+    "phase_1": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.5,
+            validation=[
+                dict(
+                    operator="==",
+                    phase="phase_1",
+                    stream="stream_BK_ap_to_staut",
+                    max_bw_rel_tolerance=0.15,
+                )
+            ],
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01)
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.5,
+        ),
     }
 }
 
 test_dual_traffic_diff_staut_BE_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BK_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5)
+    "phase_1": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.5,
+            validation=[
+                dict(
+                    operator="==",
+                    phase="phase_1",
+                    stream="stream_BK_ap_to_staut",
+                    max_bw_rel_tolerance=0.15,
+                )
+            ],
+        ),
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.85,
+            validation=[
+                dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.01)
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.5,
+        ),
     }
 }
 
 # ACM Bit Conformance Tests (Single station, as WFA test below uses two)
 test_acm_bit_on_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
             max_bandwidth_percentage=0.6,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6)
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_staut_to_ap_1": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.6,
+        ),
+        "stream_BE_staut_to_ap_2": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.6,
+        ),
     }
 }
 
 # AC Parameter Modification Tests (Single station, as WFA test below uses two)
 test_ac_param_degrade_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
             max_bandwidth_percentage=0.6,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6)
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_staut_to_ap_1": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.6,
+        ),
+        "stream_BE_staut_to_ap_2": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.6,
+        ),
     }
 }
 
 test_ac_param_degrade_VO = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VO',
+    "phase_1": {
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
             max_bandwidth_percentage=0.6,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6)
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_staut_to_ap_1": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.6,
+        ),
+        "stream_BE_staut_to_ap_2": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.6,
+        ),
     }
 }
 
 test_ac_param_improve_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_BE',
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
             max_bandwidth_percentage=0.6,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_1',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_2',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_VI_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6),
-        'stream_VI_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6)
+                dict(
+                    operator=">",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap_1",
+                    bandwidth_percentage=0.869,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator=">",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap_2",
+                    bandwidth_percentage=0.869,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_VI_staut_to_ap_1": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.6,
+        ),
+        "stream_VI_staut_to_ap_2": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.6,
+        ),
     }
 }
 
 test_ac_param_improve_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_BK',
+    "phase_1": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
             max_bandwidth_percentage=0.6,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_1',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_2',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_VI_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6),
-        'stream_VI_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6)
+                dict(
+                    operator=">",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap_1",
+                    bandwidth_percentage=0.869,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator=">",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap_2",
+                    bandwidth_percentage=0.869,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_VI_staut_to_ap_1": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.6,
+        ),
+        "stream_VI_staut_to_ap_2": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.6,
+        ),
     }
 }
 # WFA Test Plan Cases
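+#
+# Each case below is a dict of phases; every phase maps stream names to stream
+# parameters (transmitter_str, receiver_str, access_category,
+# max_bandwidth_percentage), plus optional validation criteria that reference
+# another phase/stream by name. In the two-phase cases, phase_2 validation
+# entries compare measured bandwidth against the matching phase_1 stream.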
 
 # Traffic Differentiation in Single BSS (Single Station)
 test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE = {
-    'phase_1': {
-        'steam_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "steam_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'steam_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut',
-                      bandwidth_percentage=0.85,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "steam_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_ap_to_staut",
+                    bandwidth_percentage=0.85,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_single_station_staut_VI_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65),
-    }
+    "phase_2": {
+        "stream_BE_staut_to_ap_1": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap",
+                    bandwidth_percentage=0.89,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_staut_to_ap_2": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap",
+                    bandwidth_percentage=0.87,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_BE_staut_to_ap',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_BE_staut_to_ap",
+                    bandwidth_percentage=0.81,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BK_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VO_staut_to_ap",
+                    bandwidth_percentage=0.81,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 # Traffic Differentiation in Single BSS (Two Stations)
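+# In these cases the competing streams are transmitted by a second station
+# (secondary_sta) rather than the STAUT.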
 test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE = {
-    'phase_1': {
-        'steam_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "steam_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'steam_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_secondary_to_ap',
-                      bandwidth_percentage=0.90,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "steam_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_secondary_to_ap",
+                    bandwidth_percentage=0.90,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE = {
-    'phase_1': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "steam_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='steam_VI_staut_to_ap',
-                      bandwidth_percentage=0.88,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_secondary_to_ap_1':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap_2':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "steam_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap",
+                    bandwidth_percentage=0.88,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_secondary_to_ap_1": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap_2": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK = {
-    'phase_1': {
-        'steam_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "steam_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'steam_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_BE_secondary_to_ap',
-                      bandwidth_percentage=0.90,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "steam_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap",
+                    bandwidth_percentage=0.90,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BK_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
 
 test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI = {
-    'phase_1': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45)
+    "phase_1": {
+        "steam_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+        ),
     },
-    'phase_2': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VO_secondary_to_ap',
-                      bandwidth_percentage=0.90,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_VI_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.65)
-    }
+    "phase_2": {
+        "steam_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VO_secondary_to_ap",
+                    bandwidth_percentage=0.90,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_VI_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
 }
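+# The ACM-bit and AC-parameter-degrade cases below define a single phase and
+# validate streams against other streams within that same phase_1.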
 
 test_wfa_acm_bit_on_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
             max_bandwidth_percentage=0.65,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_secondary_to_ap_1':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65),
-        'stream_BE_secondary_to_ap_2':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_secondary_to_ap_1": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+        "stream_BE_secondary_to_ap_2": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
     }
 }
 
 test_wfa_ac_param_degrade_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
             max_bandwidth_percentage=0.65,
             validation=[
                 # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_secondary_to_ap_1':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65),
-        'stream_BE_secondary_to_ap_2':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_secondary_to_ap_1": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+        "stream_BE_secondary_to_ap_2": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
     }
 }
diff --git a/src/antlion/test_utils/net/NetstackBaseTest.py b/src/antlion/test_utils/net/NetstackBaseTest.py
deleted file mode 100755
index 9cd0a7f..0000000
--- a/src/antlion/test_utils/net/NetstackBaseTest.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.base_test import BaseTestClass
-from antlion import asserts
-
-
-class NetstackBaseTest(BaseTestClass):
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
diff --git a/src/antlion/test_utils/net/arduino_test_utils.py b/src/antlion/test_utils/net/arduino_test_utils.py
deleted file mode 100644
index 45fc21a..0000000
--- a/src/antlion/test_utils/net/arduino_test_utils.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-
-ARDUINO = "/root/arduino/arduino-1.8.5/arduino "
-CONNECT_WIFI = "/arduino/connect_wifi/connect_wifi.ino"
-DISCONNECT_WIFI = "/arduino/disconnect_wifi/disconnect_wifi.ino"
-SSID = wutils.WifiEnums.SSID_KEY
-PWD = wutils.WifiEnums.PWD_KEY
-
-
-def connect_wifi(wd, network=None):
-    """Connect wifi on arduino wifi dongle
-
-    Args:
-        wd - wifi dongle object
-        network - wifi network to connect to
-    """
-    wd.log.info("Flashing connect_wifi.ino onto dongle")
-    cmd = "locate %s" % CONNECT_WIFI
-    file_path = utils.exe_cmd(cmd).decode("utf-8", "ignore").split()[-1]
-    write_status = wd.write(ARDUINO, file_path, network)
-    asserts.assert_true(write_status, "Failed to flash connect wifi")
-    wd.log.info("Flashing complete")
-    wifi_status = wd.wifi_status()
-    asserts.assert_true(wifi_status, "Failed to connect to %s" % network)
-    ping_status = wd.ping_status()
-    asserts.assert_true(ping_status, "Failed to connect to internet")
-
-
-def disconnect_wifi(wd):
-    """Disconnect wifi on arduino wifi dongle
-
-    Args:
-        wd - wifi dongle object
-
-    Returns:
-        True - if wifi is disconnected
-        False - if not
-    """
-    wd.log.info("Flashing disconnect_wifi.ino onto dongle")
-    cmd = "locate %s" % DISCONNECT_WIFI
-    file_path = utils.exe_cmd(cmd).decode("utf-8", "ignore").rstrip()
-    write_status = wd.write(ARDUINO, file_path)
-    asserts.assert_true(write_status, "Failed to flash disconnect wifi")
-    wd.log.info("Flashing complete")
-    wifi_status = wd.wifi_status(False)
-    asserts.assert_true(not wifi_status, "Failed to disconnect wifi")
diff --git a/src/antlion/test_utils/net/connectivity_const.py b/src/antlion/test_utils/net/connectivity_const.py
index a54654f..05495f0 100644
--- a/src/antlion/test_utils/net/connectivity_const.py
+++ b/src/antlion/test_utils/net/connectivity_const.py
@@ -84,18 +84,15 @@
 
 DNS_SUPPORT_TYPE = {
     DNS_GOOGLE_HOSTNAME: ["Do53", "DoT", "DoH"],
-    DNS_CLOUDFLARE_HOSTNAME: ["Do53","DoT"],
-    DOH_CLOUDFLARE_HOSTNAME: ["DoH"]
+    DNS_CLOUDFLARE_HOSTNAME: ["Do53", "DoT"],
+    DOH_CLOUDFLARE_HOSTNAME: ["DoH"],
 }
 
 DNS_GOOGLE_ADDR_V4 = ["8.8.4.4", "8.8.8.8"]
-DNS_GOOGLE_ADDR_V6 = ["2001:4860:4860::8888",
-                      "2001:4860:4860::8844"]
+DNS_GOOGLE_ADDR_V6 = ["2001:4860:4860::8888", "2001:4860:4860::8844"]
 DNS_CLOUDFLARE_ADDR_V4 = ["1.1.1.1", "1.0.0.1"]
 DOH_CLOUDFLARE_ADDR_V4 = ["104.16.248.249", "104.16.249.249"]
-DOH_CLOUDFLARE_ADDR_V6 = ["2606:4700::6810:f8f9",
-                          "2606:4700::6810:f9f9"]
-
+DOH_CLOUDFLARE_ADDR_V6 = ["2606:4700::6810:f8f9", "2606:4700::6810:f9f9"]
 
 # IpSec constants
 SOCK_STREAM = 1
@@ -118,9 +115,10 @@
 
 # Constants for VpnProfile
 class VpnProfile(object):
-    """ This class contains all the possible
-        parameters required for VPN connection
+    """This class contains all the possible
+    parameters required for VPN connection
     """
+
     NAME = "name"
     TYPE = "type"
     SERVER = "server"
@@ -140,8 +138,8 @@
 
 # Enums for VPN profile types
 class VpnProfileType(enum.Enum):
-    """ Integer constant for each type of VPN
-    """
+    """Integer constant for each type of VPN"""
+
     PPTP = 0
     L2TP_IPSEC_PSK = 1
     L2TP_IPSEC_RSA = 2
@@ -155,9 +153,10 @@
 
 # Constants for config file
 class VpnReqParams(object):
-    """ Config file parameters required for
-        VPN connection
+    """Config file parameters required for
+    VPN connection
     """
+
     vpn_server_addresses = "vpn_server_addresses"
     vpn_verify_addresses = "vpn_verify_addresses"
     vpn_username = "vpn_username"
diff --git a/src/antlion/test_utils/net/connectivity_test_utils.py b/src/antlion/test_utils/net/connectivity_test_utils.py
deleted file mode 100644
index 6841de9..0000000
--- a/src/antlion/test_utils/net/connectivity_test_utils.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion.test_utils.net import connectivity_const as cconst
-from queue import Empty
-
-def _listen_for_keepalive_event(ad, key, msg, ka_event):
-    """Listen for keepalive event and return status
-
-    Args:
-        ad: DUT object
-        key: keepalive key
-        msg: Error message
-        event: Keepalive event type
-    """
-    ad.droid.socketKeepaliveStartListeningForEvent(key, ka_event)
-    try:
-        event = ad.ed.pop_event("SocketKeepaliveCallback")
-        status = event["data"]["socketKeepaliveEvent"] == ka_event
-    except Empty:
-        asserts.fail(msg)
-    finally:
-        ad.droid.socketKeepaliveStopListeningForEvent(key, ka_event)
-    if ka_event != "Started":
-        ad.droid.removeSocketKeepaliveReceiverKey(key)
-    if status:
-        ad.log.info("'%s' keepalive event successful" % ka_event)
-    return status
-
-def start_natt_socket_keepalive(ad, udp_encap, src_ip, dst_ip, interval = 10):
-    """Start NATT SocketKeepalive on DUT
-
-    Args:
-        ad: DUT object
-        udp_encap: udp_encap socket key
-        src_ip: IP addr of the client
-        dst_ip: IP addr of the keepalive server
-        interval: keepalive time interval
-    """
-    ad.log.info("Starting Natt Socket Keepalive")
-    key = ad.droid.startNattSocketKeepalive(udp_encap, src_ip, dst_ip, interval)
-    msg = "Failed to receive confirmation of starting natt socket keeaplive"
-    status = _listen_for_keepalive_event(ad, key, msg, "Started")
-    return key if status else None
-
-def start_tcp_socket_keepalive(ad, socket, time_interval = 10):
-    """Start TCP socket keepalive on DUT
-
-    Args:
-        ad: DUT object
-        socket: TCP socket key
-        time_interval: Keepalive time interval
-    """
-    ad.log.info("Starting TCP Socket Keepalive")
-    key = ad.droid.startTcpSocketKeepalive(socket, time_interval)
-    msg = "Failed to receive confirmation of starting tcp socket keeaplive"
-    status = _listen_for_keepalive_event(ad, key, msg, "Started")
-    return key if status else None
-
-def socket_keepalive_error(ad, key):
-    """Verify Error callback
-
-    Args:
-        ad: DUT object
-        key: Keepalive key
-    """
-    ad.log.info("Verify Error callback on keepalive: %s" % key)
-    msg = "Failed to receive confirmation of Error callback"
-    return _listen_for_keepalive_event(ad, key, msg, "Error")
-
-def socket_keepalive_data_received(ad, key):
-    """Verify OnDataReceived callback
-
-    Args:
-        ad: DUT object
-        key: Keepalive key
-    """
-    ad.log.info("Verify OnDataReceived callback on keepalive: %s" % key)
-    msg = "Failed to receive confirmation of OnDataReceived callback"
-    return _listen_for_keepalive_event(ad, key, msg, "OnDataReceived")
-
-def stop_socket_keepalive(ad, key):
-    """Stop SocketKeepalive on DUT
-
-    Args:
-        ad: DUT object
-        key: Keepalive key
-    """
-    ad.log.info("Stopping Socket keepalive: %s" % key)
-    ad.droid.stopSocketKeepalive(key)
-    msg = "Failed to receive confirmation of stopping socket keepalive"
-    return _listen_for_keepalive_event(ad, key, msg, "Stopped")
-
-
-def set_private_dns(ad, dns_mode, hostname=None):
-    """ Set private DNS mode and DNS server hostname on DUT
-
-    :param ad: Device under test (DUT)
-    :param dns_mode: DNS mode, including OFF, OPPORTUNISTIC, STRICT
-    :param hostname: DNS server hostname
-    """
-    """ Set private DNS mode on dut """
-    if dns_mode == cconst.PRIVATE_DNS_MODE_OFF:
-        ad.droid.setPrivateDnsMode(False)
-    else:
-        ad.droid.setPrivateDnsMode(True, hostname)
-
-    mode = ad.droid.getPrivateDnsMode()
-    host = ad.droid.getPrivateDnsSpecifier()
-    ad.log.info("DNS mode is %s and DNS server is %s" % (mode, host))
diff --git a/src/antlion/test_utils/net/ipsec_test_utils.py b/src/antlion/test_utils/net/ipsec_test_utils.py
deleted file mode 100644
index 657aa7f..0000000
--- a/src/antlion/test_utils/net/ipsec_test_utils.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import binascii
-import os
-import re
-
-from antlion.test_utils.net import connectivity_const as cconst
-from antlion import asserts
-
-PKTS = 5
-
-
-def make_key(len_bits):
-    asserts.assert_true(
-        len_bits % 8 == 0, "Unexpected key length. Should be a multiple "
-        "of 8, got %s" % len_bits)
-    return binascii.hexlify(os.urandom(int(len_bits / 8))).decode()
-
-
-def allocate_spis(ad, ip_a, ip_b, in_spi=None, out_spi=None):
-    """ Allocate in and out SPIs for android device
-
-    Args:
-      1. ad : android device object
-      2. ip_a : local IP address for In SPI
-      3. ip_b : remote IP address for Out SPI
-      4. in_spi : Generate In SPI with this value
-      5. out_spi : Generate Out SPI with this value
-
-    Returns:
-      List of In and Out SPI
-    """
-    in_spi_key = ad.droid.ipSecAllocateSecurityParameterIndex(ip_a, in_spi)
-    in_spi = ad.droid.ipSecGetSecurityParameterIndex(in_spi_key)
-    ad.log.info("In SPI: %s" % hex(in_spi))
-
-    out_spi_key = ad.droid.ipSecAllocateSecurityParameterIndex(ip_b, out_spi)
-    out_spi = ad.droid.ipSecGetSecurityParameterIndex(out_spi_key)
-    ad.log.info("Out SPI: %s" % hex(out_spi))
-
-    asserts.assert_true(in_spi and out_spi, "Failed to allocate SPIs")
-    return [in_spi_key, out_spi_key]
-
-
-def release_spis(ad, spis):
-    """ Destroy SPIs
-
-    Args:
-      1. ad : android device object
-      2. spis : list of SPI keys to destroy
-    """
-    for spi_key in spis:
-        ad.droid.ipSecReleaseSecurityParameterIndex(spi_key)
-        spi = ad.droid.ipSecGetSecurityParameterIndex(spi_key)
-        asserts.assert_true(not spi, "Failed to release SPI")
-
-
-def create_transport_mode_transforms(ad,
-                                     spis,
-                                     ip_a,
-                                     ip_b,
-                                     crypt_algo,
-                                     crypt_key,
-                                     auth_algo,
-                                     auth_key,
-                                     trunc_bit,
-                                     udp_encap_sock=None):
-    """ Create transport mode transforms on the device
-
-    Args:
-      1. ad : android device object
-      2. spis : spi keys of the SPIs created
-      3. ip_a : local IP addr
-      4. ip_b : remote IP addr
-      5. crypt_key : encryption key
-      6. auth_key : authentication key
-      7. udp_encap_sock : set udp encapsulation for ESP packets
-
-    Returns:
-      List of In and Out Transforms
-    """
-    in_transform = ad.droid.ipSecCreateTransportModeTransform(
-        crypt_algo, crypt_key, auth_algo, auth_key, trunc_bit, spis[0], ip_b,
-        udp_encap_sock)
-    ad.log.info("In Transform: %s" % in_transform)
-    out_transform = ad.droid.ipSecCreateTransportModeTransform(
-        crypt_algo, crypt_key, auth_algo, auth_key, trunc_bit, spis[1], ip_a,
-        udp_encap_sock)
-    ad.log.info("Out Transform: %s" % out_transform)
-    asserts.assert_true(in_transform and out_transform,
-                        "Failed to create transforms")
-    return [in_transform, out_transform]
-
-
-def destroy_transport_mode_transforms(ad, transforms):
-    """ Destroy transforms on the device
-
-    Args:
-      1. ad : android device object
-      2. transforms : list to transform keys to destroy
-    """
-    for transform in transforms:
-        ad.droid.ipSecDestroyTransportModeTransform(transform)
-        status = ad.droid.ipSecGetTransformStatus(transform)
-        ad.log.info("Transform status: %s" % status)
-        asserts.assert_true(not status, "Failed to destroy transform")
-
-
-def apply_transport_mode_transforms_file_descriptors(ad, fd, transforms):
-    """ Apply transpot mode transform to FileDescriptor object
-
-    Args:
-      1. ad - android device object
-      2. fd - FileDescriptor key
-      3. transforms - list of in and out transforms
-    """
-    in_transform = ad.droid.ipSecApplyTransportModeTransformFileDescriptor(
-        fd, cconst.DIRECTION_IN, transforms[0])
-    out_transform = ad.droid.ipSecApplyTransportModeTransformFileDescriptor(
-        fd, cconst.DIRECTION_OUT, transforms[1])
-    asserts.assert_true(in_transform and out_transform,
-                        "Failed to apply transform")
-    ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-    ad.log.info("XFRM STATE:\n%s\n" % ip_xfrm_state)
-    ip_xfrm_policy = ad.adb.shell("ip -s xfrm policy")
-    ad.log.info("XFRM POLICY:\n%s\n" % ip_xfrm_policy)
-
-
-def remove_transport_mode_transforms_file_descriptors(ad, fd):
-    """ Remove transport mode transform from FileDescriptor object
-
-    Args:
-      1. ad - android device object
-      2. socket - FileDescriptor key
-    """
-    status = ad.droid.ipSecRemoveTransportModeTransformsFileDescriptor(fd)
-    asserts.assert_true(status, "Failed to remove transform")
-
-
-def apply_transport_mode_transforms_datagram_socket(ad, socket, transforms):
-    """ Apply transport mode transform to DatagramSocket object
-
-    Args:
-      1. ad - android device object
-      2. socket - DatagramSocket object key
-      3. transforms - list of in and out transforms
-    """
-    in_tfrm_status = ad.droid.ipSecApplyTransportModeTransformDatagramSocket(
-        socket, cconst.DIRECTION_IN, transforms[0])
-    out_tfrm_status = ad.droid.ipSecApplyTransportModeTransformDatagramSocket(
-        socket, cconst.DIRECTION_OUT, transforms[1])
-    asserts.assert_true(in_tfrm_status and out_tfrm_status,
-                        "Failed to apply transform")
-
-    ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-    ad.log.info("XFRM STATE:\n%s\n" % ip_xfrm_state)
-
-
-def remove_transport_mode_transforms_datagram_socket(ad, socket):
-    """ Remove transport mode transform from DatagramSocket object
-
-    Args:
-      1. ad - android device object
-      2. socket - DatagramSocket object key
-    """
-    status = ad.droid.ipSecRemoveTransportModeTransformsDatagramSocket(socket)
-    asserts.assert_true(status, "Failed to remove transform")
-
-
-def apply_transport_mode_transforms_socket(ad, socket, transforms):
-    """ Apply transport mode transform to Socket object
-
-    Args:
-      1. ad - android device object
-      2. socket - Socket object key
-      3. transforms - list of in and out transforms
-    """
-    in_tfrm_status = ad.droid.ipSecApplyTransportModeTransformSocket(
-        socket, cconst.DIRECTION_IN, transforms[0])
-    out_tfrm_status = ad.droid.ipSecApplyTransportModeTransformSocket(
-        socket, cconst.DIRECTION_OUT, transforms[1])
-    asserts.assert_true(in_tfrm_status and out_tfrm_status,
-                        "Failed to apply transform")
-
-    ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-    ad.log.info("XFRM STATE:\n%s\n" % ip_xfrm_state)
-
-
-def remove_transport_mode_transforms_socket(ad, socket):
-    """ Remove transport mode transform from Socket object
-
-    Args:
-      1. ad - android device object
-      2. socket - Socket object key
-    """
-    status = ad.droid.ipSecRemoveTransportModeTransformsSocket(socket)
-    asserts.assert_true(status, "Failed to remove transform")
-
-
-def verify_esp_packets(ads):
-    """ Verify that encrypted ESP packets are sent
-
-    Args:
-      1. ads - Verify ESP packets on all devices
-    """
-    for ad in ads:
-        ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-        ad.log.info("XFRM STATE on %s:\n%s\n" % (ad.serial, ip_xfrm_state))
-        pattern = re.findall(r'\d+\(packets\)', ip_xfrm_state)
-        esp_pkts = False
-        for _ in pattern:
-            if int(_.split('(')[0]) >= PKTS:
-                esp_pkts = True
-                break
-        asserts.assert_true(esp_pkts, "Could not find ESP pkts")
-
-
-def generate_random_crypt_auth_combo():
-    """ Generate every possible combination of crypt and auth keys,
-        auth algo, trunc bits supported by IpSecManager
-    """
-    crypt_key_length = [128, 192, 256]
-    auth_method_key = {
-        cconst.AUTH_HMAC_MD5: 128,
-        cconst.AUTH_HMAC_SHA1: 160,
-        cconst.AUTH_HMAC_SHA256: 256,
-        cconst.AUTH_HMAC_SHA384: 384,
-        cconst.AUTH_HMAC_SHA512: 512
-    }
-    auth_method_trunc = {
-        cconst.AUTH_HMAC_MD5: list(range(96, 136, 8)),
-        cconst.AUTH_HMAC_SHA1: list(range(96, 168, 8)),
-        cconst.AUTH_HMAC_SHA256: list(range(96, 264, 8)),
-        cconst.AUTH_HMAC_SHA384: list(range(192, 392, 8)),
-        cconst.AUTH_HMAC_SHA512: list(range(256, 520, 8))
-    }
-    return_list = []
-    for c in crypt_key_length:
-        for k in auth_method_key.keys():
-            auth_key = auth_method_key[k]
-            lst = auth_method_trunc[k]
-            for t in lst:
-                combo = []
-                combo.append(c)
-                combo.append(k)
-                combo.append(auth_key)
-                combo.append(t)
-                return_list.append(combo)
-
-    return return_list
diff --git a/src/antlion/test_utils/net/net_test_utils.py b/src/antlion/test_utils/net/net_test_utils.py
index c4b6d0c..4eb47ac 100644
--- a/src/antlion/test_utils/net/net_test_utils.py
+++ b/src/antlion/test_utils/net/net_test_utils.py
@@ -19,7 +19,6 @@
 import time
 import urllib.request
 
-from antlion import asserts
 from antlion import signals
 from antlion import utils
 from antlion.controllers import adb
@@ -29,6 +28,8 @@
 from antlion.utils import stop_standing_subprocess
 from antlion.test_utils.net import connectivity_const as cconst
 
+from mobly import asserts
+
 VPN_CONST = cconst.VpnProfile
 VPN_TYPE = cconst.VpnProfileType
 VPN_PARAMS = cconst.VpnReqParams
@@ -55,14 +56,14 @@
     Args:
         ad: android device object
     """
-    commands = ["pm grant com.android.chrome "
-                "android.permission.READ_EXTERNAL_STORAGE",
-                "pm grant com.android.chrome "
-                "android.permission.WRITE_EXTERNAL_STORAGE",
-                "rm /data/local/chrome-command-line",
-                "am set-debug-app --persistent com.android.chrome",
-                'echo "chrome --no-default-browser-check --no-first-run '
-                '--disable-fre" > /data/local/tmp/chrome-command-line']
+    commands = [
+        "pm grant com.android.chrome " "android.permission.READ_EXTERNAL_STORAGE",
+        "pm grant com.android.chrome " "android.permission.WRITE_EXTERNAL_STORAGE",
+        "rm /data/local/chrome-command-line",
+        "am set-debug-app --persistent com.android.chrome",
+        'echo "chrome --no-default-browser-check --no-first-run '
+        '--disable-fre" > /data/local/tmp/chrome-command-line',
+    ]
     for cmd in commands:
         try:
             ad.adb.shell(cmd)
@@ -117,31 +118,35 @@
     time.sleep(cconst.VPN_TIMEOUT)
 
     connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo()
-    asserts.assert_equal(connected_vpn_info["state"],
-                         cconst.VPN_STATE_CONNECTED,
-                         "Unable to establish VPN connection for %s"
-                         % vpn_profile)
+    asserts.assert_equal(
+        connected_vpn_info["state"],
+        cconst.VPN_STATE_CONNECTED,
+        "Unable to establish VPN connection for %s" % vpn_profile,
+    )
 
     ping_result = verify_ping_to_vpn_ip(ad, vpn_ping_addr)
     ip_xfrm_state = ad.adb.shell("ip xfrm state")
-    match_obj = re.search(r'hmac(.*)', "%s" % ip_xfrm_state)
+    match_obj = re.search(r"hmac(.*)", "%s" % ip_xfrm_state)
     if match_obj:
         ip_xfrm_state = format(match_obj.group(0)).split()
         ad.log.info("HMAC for ESP is %s " % ip_xfrm_state[0])
 
     ad.droid.vpnStopLegacyVpn()
-    asserts.assert_true(ping_result,
-                        "Ping to the internal IP failed. "
-                        "Expected to pass as VPN is connected")
+    asserts.assert_true(
+        ping_result,
+        "Ping to the internal IP failed. " "Expected to pass as VPN is connected",
+    )
 
     connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo()
-    asserts.assert_true(not connected_vpn_info,
-                        "Unable to terminate VPN connection for %s"
-                        % vpn_profile)
+    asserts.assert_true(
+        not connected_vpn_info,
+        "Unable to terminate VPN connection for %s" % vpn_profile,
+    )
 
 
-def download_load_certs(ad, vpn_params, vpn_type, vpn_server_addr,
-                        ipsec_server_type, log_path):
+def download_load_certs(
+    ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path
+):
     """Download the certificates from VPN server and push to sdcard of DUT.
 
     Args:
@@ -155,17 +160,21 @@
     Returns:
       Client cert file name on DUT's sdcard
     """
-    url = "http://%s%s%s" % (vpn_server_addr,
-                             vpn_params['cert_path_vpnserver'],
-                             vpn_params['client_pkcs_file_name'])
+    url = "http://%s%s%s" % (
+        vpn_server_addr,
+        vpn_params["cert_path_vpnserver"],
+        vpn_params["client_pkcs_file_name"],
+    )
     logging.info("URL is: %s" % url)
     if vpn_server_addr == LOCALHOST:
         ad.droid.httpDownloadFile(url, "/sdcard/")
-        return vpn_params['client_pkcs_file_name']
+        return vpn_params["client_pkcs_file_name"]
 
-    local_cert_name = "%s_%s_%s" % (vpn_type.name,
-                                    ipsec_server_type,
-                                    vpn_params['client_pkcs_file_name'])
+    local_cert_name = "%s_%s_%s" % (
+        vpn_type.name,
+        ipsec_server_type,
+        vpn_params["client_pkcs_file_name"],
+    )
     local_file_path = os.path.join(log_path, local_cert_name)
     try:
         ret = urllib.request.urlopen(url)
@@ -178,12 +187,9 @@
     return local_cert_name
 
 
-def generate_legacy_vpn_profile(ad,
-                                vpn_params,
-                                vpn_type,
-                                vpn_server_addr,
-                                ipsec_server_type,
-                                log_path):
+def generate_legacy_vpn_profile(
+    ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path
+):
     """Generate legacy VPN profile for a VPN.
 
     Args:
@@ -197,12 +203,13 @@
     Returns:
       Vpn profile
     """
-    vpn_profile = {VPN_CONST.USER: vpn_params['vpn_username'],
-                   VPN_CONST.PWD: vpn_params['vpn_password'],
-                   VPN_CONST.TYPE: vpn_type.value,
-                   VPN_CONST.SERVER: vpn_server_addr, }
-    vpn_profile[VPN_CONST.NAME] = "test_%s_%s" % (vpn_type.name,
-                                                  ipsec_server_type)
+    vpn_profile = {
+        VPN_CONST.USER: vpn_params["vpn_username"],
+        VPN_CONST.PWD: vpn_params["vpn_password"],
+        VPN_CONST.TYPE: vpn_type.value,
+        VPN_CONST.SERVER: vpn_server_addr,
+    }
+    vpn_profile[VPN_CONST.NAME] = "test_%s_%s" % (vpn_type.name, ipsec_server_type)
     if vpn_type.name == "PPTP":
         vpn_profile[VPN_CONST.NAME] = "test_%s" % vpn_type.name
 
@@ -210,17 +217,13 @@
     rsa_set = set(["L2TP_IPSEC_RSA", "IPSEC_XAUTH_RSA", "IPSEC_HYBRID_RSA"])
 
     if vpn_type.name in psk_set:
-        vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params['psk_secret']
+        vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"]
     elif vpn_type.name in rsa_set:
-        cert_name = download_load_certs(ad,
-                                        vpn_params,
-                                        vpn_type,
-                                        vpn_server_addr,
-                                        ipsec_server_type,
-                                        log_path)
-        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split('.')[0]
-        ad.droid.installCertificate(vpn_profile, cert_name,
-                                    vpn_params['cert_password'])
+        cert_name = download_load_certs(
+            ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path
+        )
+        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split(".")[0]
+        ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"])
     else:
         vpn_profile[VPN_CONST.MPPE] = "mppe"
 
@@ -250,25 +253,35 @@
         vpn_profile[VPN_CONST.PWD] = vpn_params["vpn_password"]
         vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"]
         cert_name = download_load_certs(
-            ad, vpn_params, vpn_type, vpn_params["server_addr"],
-            "IKEV2_IPSEC_USER_PASS", log_path)
-        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split('.')[0]
-        ad.droid.installCertificate(
-            vpn_profile, cert_name, vpn_params['cert_password'])
+            ad,
+            vpn_params,
+            vpn_type,
+            vpn_params["server_addr"],
+            "IKEV2_IPSEC_USER_PASS",
+            log_path,
+        )
+        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split(".")[0]
+        ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"])
     elif vpn_type.name == "IKEV2_IPSEC_PSK":
         vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"]
         vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"]
     else:
         vpn_profile[VPN_CONST.IPSEC_ID] = "%s@%s" % (
-            vpn_params["vpn_identity"], server_addr)
+            vpn_params["vpn_identity"],
+            server_addr,
+        )
         logging.info("ID: %s@%s" % (vpn_params["vpn_identity"], server_addr))
         cert_name = download_load_certs(
-            ad, vpn_params, vpn_type, vpn_params["server_addr"],
-            "IKEV2_IPSEC_RSA", log_path)
-        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split('.')[0]
-        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split('.')[0]
-        ad.droid.installCertificate(
-            vpn_profile, cert_name, vpn_params['cert_password'])
+            ad,
+            vpn_params,
+            vpn_type,
+            vpn_params["server_addr"],
+            "IKEV2_IPSEC_RSA",
+            log_path,
+        )
+        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split(".")[0]
+        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split(".")[0]
+        ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"])
 
     return vpn_profile
 
@@ -287,21 +300,20 @@
 
     file_name = "%s/tcpdump_%s_%s.pcap" % (TCPDUMP_PATH, ad.serial, test_name)
     ad.log.info("tcpdump file is %s", file_name)
-    cmd = "adb -s {} shell tcpdump -i {} -s0 -w {}".format(ad.serial,
-                                                           interface, file_name)
+    cmd = "adb -s {} shell tcpdump -i {} -s0 -w {}".format(
+        ad.serial, interface, file_name
+    )
     try:
         return start_standing_subprocess(cmd, 5)
     except Exception:
-        ad.log.exception('Could not start standing process %s' % repr(cmd))
+        ad.log.exception("Could not start standing process %s" % repr(cmd))
 
     return None
 
 
-def stop_tcpdump(ad,
-                 proc,
-                 test_name,
-                 pull_dump=True,
-                 adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT):
+def stop_tcpdump(
+    ad, proc, test_name, pull_dump=True, adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT
+):
     """Stops tcpdump on any iface.
 
        Pulls the tcpdump file in the tcpdump dir if necessary.
@@ -326,8 +338,7 @@
     if pull_dump:
         log_path = os.path.join(ad.device_log_path, "TCPDUMP_%s" % ad.serial)
         os.makedirs(log_path, exist_ok=True)
-        ad.adb.pull("%s/. %s" % (TCPDUMP_PATH, log_path),
-                    timeout=adb_pull_timeout)
+        ad.adb.pull("%s/. %s" % (TCPDUMP_PATH, log_path), timeout=adb_pull_timeout)
         ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True)
         file_name = "tcpdump_%s_%s.pcap" % (ad.serial, test_name)
         return "%s/%s" % (log_path, file_name)
@@ -349,15 +360,26 @@
     ad.log.info("Starting tcpdump on gce server")
 
     # pcap file name
-    fname = "/tmp/%s_%s_%s_%s" % \
-        (test_name, ad.model, ad.serial,
-         time.strftime('%Y-%m-%d_%H-%M-%S', time.localtime(time.time())))
+    fname = "/tmp/%s_%s_%s_%s" % (
+        test_name,
+        ad.model,
+        ad.serial,
+        time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),
+    )
 
     # start tcpdump
-    tcpdump_cmd = "sudo bash -c \'tcpdump -i %s -w %s.pcap port %s > \
-        %s.txt 2>&1 & echo $!\'" % (gce["interface"], fname, dest_port, fname)
-    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % \
-        (GCE_SSH, gce["project"], gce["zone"], gce["username"], gce["hostname"])
+    tcpdump_cmd = (
+        "sudo bash -c 'tcpdump -i %s -w %s.pcap port %s > \
+        %s.txt 2>&1 & echo $!'"
+        % (gce["interface"], fname, dest_port, fname)
+    )
+    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % (
+        GCE_SSH,
+        gce["project"],
+        gce["zone"],
+        gce["username"],
+        gce["hostname"],
+    )
     gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd)
     utils.exe_cmd(gce_ssh_cmd)
 
@@ -385,8 +407,13 @@
 
     # stop tcpdump
     tcpdump_cmd = "sudo kill %s" % tcpdump_pid
-    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % \
-        (GCE_SSH, gce["project"], gce["zone"], gce["username"], gce["hostname"])
+    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % (
+        GCE_SSH,
+        gce["project"],
+        gce["zone"],
+        gce["username"],
+        gce["hostname"],
+    )
     gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd)
     utils.exe_cmd(gce_ssh_cmd)
 
@@ -399,19 +426,23 @@
         return None
 
     # pull pcap file
-    gcloud_scp_cmd = "%s --project=%s --zone=%s %s@%s:" % \
-        (GCE_SCP, gce["project"], gce["zone"], gce["username"], gce["hostname"])
-    pull_file = '%s%s.pcap %s/' % (gcloud_scp_cmd, fname, ad.device_log_path)
+    gcloud_scp_cmd = "%s --project=%s --zone=%s %s@%s:" % (
+        GCE_SCP,
+        gce["project"],
+        gce["zone"],
+        gce["username"],
+        gce["hostname"],
+    )
+    pull_file = "%s%s.pcap %s/" % (gcloud_scp_cmd, fname, ad.device_log_path)
     utils.exe_cmd(pull_file)
-    if not os.path.exists(
-        "%s/%s.pcap" % (ad.device_log_path, fname.split('/')[-1])):
+    if not os.path.exists("%s/%s.pcap" % (ad.device_log_path, fname.split("/")[-1])):
         raise signals.TestFailure("Failed to pull tcpdump from gce server")
 
     # delete pcaps
     utils.exe_cmd('%s "sudo rm %s.*"' % (gcloud_ssh_cmd, fname))
 
     # return pcap file
-    pcap_file = "%s/%s.pcap" % (ad.device_log_path, fname.split('/')[-1])
+    pcap_file = "%s/%s.pcap" % (ad.device_log_path, fname.split("/")[-1])
     return pcap_file
 
 
@@ -467,9 +498,9 @@
     # (equivalent to a 9s timeout)
     for _ in range(0, 10):
         new_ifaces = set(get_if_list()) - old_set
-        asserts.assert_true(len(new_ifaces) < 2,
-                            "Too many new interfaces after turning on "
-                            "tethering")
+        asserts.assert_true(
+            len(new_ifaces) < 2, "Too many new interfaces after turning on " "tethering"
+        )
         if len(new_ifaces) == 1:
             # enable the new iface before return
             new_iface = new_ifaces.pop()
@@ -498,10 +529,12 @@
     result = job.run([conf.prog.ifconfig])
     if result.exit_status:
         raise asserts.fail(
-            "Failed to execute ifconfig: {}".format(plain_str(result.stderr)))
+            "Failed to execute ifconfig: {}".format(plain_str(result.stderr))
+        )
 
     interfaces = [
-        line[:line.find(':')] for line in plain_str(result.stdout).splitlines()
+        line[: line.find(":")]
+        for line in plain_str(result.stdout).splitlines()
         if ": flags" in line.lower()
     ]
     return interfaces
@@ -545,4 +578,5 @@
     result = job.run("sudo ifconfig %s up" % (iface), ignore_status=True)
     if result.exit_status:
         raise asserts.fail(
-            "Failed to execute ifconfig: {}".format(plain_str(result.stderr)))
+            "Failed to execute ifconfig: {}".format(plain_str(result.stderr))
+        )
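As a reading aid (not part of the patch itself), here is a minimal sketch of how the reformatted start_tcpdump/stop_tcpdump helpers are typically bracketed around a test body. It assumes `ad` is an antlion AndroidDevice from a running testbed and that this module is the one other files in this patch import as `nutils`; the context manager is illustrative only.

```python
# Illustrative wrapper around the helpers reformatted above; not part of the patch.
from contextlib import contextmanager

from antlion.test_utils.net import net_test_utils as nutils


@contextmanager
def tcpdump_capture(ad, test_name):
    """Bracket a test body with an on-device tcpdump capture."""
    proc = nutils.start_tcpdump(ad, test_name)
    try:
        yield proc
    finally:
        # stop_tcpdump pulls the pcap into the device log path by default;
        # proc is None when start_tcpdump could not spawn the subprocess.
        if proc is not None:
            nutils.stop_tcpdump(ad, proc, test_name)
```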
diff --git a/src/antlion/test_utils/net/nsd_const.py b/src/antlion/test_utils/net/nsd_const.py
deleted file mode 100644
index 6b53f23..0000000
--- a/src/antlion/test_utils/net/nsd_const.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# NsdManager.RegistrationListener events
-######################################################
-REG_LISTENER_EVENT = "NsdRegistrationListener"
-
-# event type - using REG_LISTENER_CALLBACK
-REG_LISTENER_EVENT_ON_REG_FAILED = "OnRegistrationFailed"
-REG_LISTENER_EVENT_ON_SERVICE_REGISTERED = "OnServiceRegistered"
-REG_LISTENER_EVENT_ON_SERVICE_UNREG = "OnServiceUnregistered"
-REG_LISTENER_EVENT_ON_UNREG_FAILED = "OnUnregistrationFailed"
-
-# event data keys
-REG_LISTENER_DATA_ID = "id"
-REG_LISTENER_CALLBACK = "callback"
-REG_LISTENER_ERROR_CODE = "error_code"
-
-######################################################
-# NsdManager.DiscoveryListener events
-######################################################
-DISCOVERY_LISTENER_EVENT = "NsdDiscoveryListener"
-
-# event type - using DISCOVERY_LISTENER_DATA_CALLBACK
-DISCOVERY_LISTENER_EVENT_ON_DISCOVERY_STARTED = "OnDiscoveryStarted"
-DISCOVERY_LISTENER_EVENT_ON_DISCOVERY_STOPPED = "OnDiscoveryStopped"
-DISCOVERY_LISTENER_EVENT_ON_SERVICE_FOUND = "OnServiceFound"
-DISCOVERY_LISTENER_EVENT_ON_SERVICE_LOST = "OnServiceLost"
-DISCOVERY_LISTENER_EVENT_ON_START_DISCOVERY_FAILED = "OnStartDiscoveryFailed"
-DISCOVERY_LISTENER_EVENT_ON_STOP_DISCOVERY_FAILED = "OnStopDiscoveryFailed"
-
-# event data keys
-DISCOVERY_LISTENER_DATA_ID = "id"
-DISCOVERY_LISTENER_DATA_CALLBACK = "callback"
-DISCOVERY_LISTENER_DATA_SERVICE_TYPE = "service_type"
-DISCOVERY_LISTENER_DATA_ERROR_CODE = "error_code"
-
-######################################################
-# NsdManager.ResolveListener events
-######################################################
-RESOLVE_LISTENER_EVENT = "NsdResolveListener"
-
-# event type using RESOLVE_LISTENER_DATA_CALLBACK
-RESOLVE_LISTENER_EVENT_ON_RESOLVE_FAIL = "OnResolveFail"
-RESOLVE_LISTENER_EVENT_ON_SERVICE_RESOLVED = "OnServiceResolved"
-
-# event data keys
-RESOLVE_LISTENER_DATA_ID = "id"
-RESOLVE_LISTENER_DATA_CALLBACK = "callback"
-RESOLVE_LISTENER_DATA_ERROR_CODE = "error_code"
-
-######################################################
-# NsdServiceInfo elements
-######################################################
-NSD_SERVICE_INFO_HOST = "serviceInfoHost"
-NSD_SERVICE_INFO_PORT = "serviceInfoPort"
-NSD_SERVICE_INFO_SERVICE_NAME = "serviceInfoServiceName"
-NSD_SERVICE_INFO_SERVICE_TYPE = "serviceInfoServiceType"
diff --git a/src/antlion/test_utils/net/socket_test_utils.py b/src/antlion/test_utils/net/socket_test_utils.py
deleted file mode 100644
index a2d695c..0000000
--- a/src/antlion/test_utils/net/socket_test_utils.py
+++ /dev/null
@@ -1,293 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import queue
-import threading
-import time
-
-from antlion.test_utils.net import connectivity_const as cconst
-from antlion import asserts
-
-MSG = "Test message "
-PKTS = 5
-""" Methods for android.system.Os based sockets """
-
-
-def open_android_socket(ad, domain, sock_type, ip, port):
-    """ Open TCP or UDP using android.system.Os class
-
-    Args:
-      1. ad - android device object
-      2. domain - IPv4 or IPv6 type
-      3. sock_type - UDP or TCP socket
-      4. ip - IP addr on the device
-      5. port - open socket on port
-
-    Returns:
-      File descriptor key
-    """
-    fd_key = ad.droid.openSocket(domain, sock_type, ip, port)
-    ad.log.info("File descriptor: %s" % fd_key)
-    asserts.assert_true(fd_key, "Failed to open socket")
-    return fd_key
-
-
-def close_android_socket(ad, fd_key):
-    """ Close socket
-
-    Args:
-      1. ad - android device object
-      2. fd_key - file descriptor key
-    """
-    status = ad.droid.closeSocket(fd_key)
-    asserts.assert_true(status, "Failed to close socket")
-
-
-def listen_accept_android_socket(client, server, client_fd, server_fd,
-                                 server_ip, server_port):
-    """ Listen, accept TCP sockets
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_fd : client's socket handle
-      4. server_fd : server's socket handle
-      5. server_ip : send data to this IP
-      6. server_port : send data to this port
-    """
-    server.droid.listenSocket(server_fd)
-    client.droid.connectSocket(client_fd, server_ip, server_port)
-    sock = server.droid.acceptSocket(server_fd)
-    asserts.assert_true(sock, "Failed to accept socket")
-    return sock
-
-
-def send_recv_data_android_sockets(client, server, client_fd, server_fd,
-                                   server_ip, server_port):
-    """ Send TCP or UDP data over android os sockets from client to server.
-        Verify that server received the data.
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_fd : client's socket handle
-      4. server_fd : server's socket handle
-      5. server_ip : send data to this IP
-      6. server_port : send data to this port
-    """
-    send_list = []
-    recv_list = []
-
-    for _ in range(1, PKTS + 1):
-        msg = MSG + " %s" % _
-        send_list.append(msg)
-        client.log.info("Sending message: %s" % msg)
-        client.droid.sendDataOverSocket(server_ip, server_port, msg, client_fd)
-        recv_msg = server.droid.recvDataOverSocket(server_fd)
-        server.log.info("Received message: %s" % recv_msg)
-        recv_list.append(recv_msg)
-
-    recv_list = [x.rstrip('\x00') if x else x for x in recv_list]
-    asserts.assert_true(send_list and recv_list and send_list == recv_list,
-                        "Send and recv information is incorrect")
-
-
-""" Methods for java.net.DatagramSocket based sockets """
-
-
-def open_datagram_socket(ad, ip, port):
-    """ Open datagram socket
-
-    Args:
-      1. ad : android device object
-      2. ip : IP addr on the device
-      3. port : socket port
-
-    Returns:
-      Hash key of the datagram socket
-    """
-    socket_key = ad.droid.openDatagramSocket(ip, port)
-    ad.log.info("Datagram socket: %s" % socket_key)
-    asserts.assert_true(socket_key, "Failed to open datagram socket")
-    return socket_key
-
-
-def close_datagram_socket(ad, socket_key):
-    """ Close datagram socket
-
-    Args:
-      1. socket_key : hash key of datagram socket
-    """
-    status = ad.droid.closeDatagramSocket(socket_key)
-    asserts.assert_true(status, "Failed to close datagram socket")
-
-
-def send_recv_data_datagram_sockets(client, server, client_sock, server_sock,
-                                    server_ip, server_port):
-    """ Send data over datagram socket from dut_a to dut_b.
-        Verify that dut_b received the data.
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_sock : client's socket handle
-      4. server_sock : server's socket handle
-      5. server_ip : send data to this IP
-      6. server_port : send data to this port
-    """
-    send_list = []
-    recv_list = []
-
-    for _ in range(1, PKTS + 1):
-        msg = MSG + " %s" % _
-        send_list.append(msg)
-        client.log.info("Sending message: %s" % msg)
-        client.droid.sendDataOverDatagramSocket(client_sock, msg, server_ip,
-                                                server_port)
-        recv_msg = server.droid.recvDataOverDatagramSocket(server_sock)
-        server.log.info("Received message: %s" % recv_msg)
-        recv_list.append(recv_msg)
-
-    recv_list = [x.rstrip('\x00') if x else x for x in recv_list]
-    asserts.assert_true(send_list and recv_list and send_list == recv_list,
-                        "Send and recv information is incorrect")
-
-
-""" Utils methods for java.net.Socket based sockets """
-
-
-def _accept_socket(server, server_ip, server_port, server_sock, q):
-    sock = server.droid.acceptTcpSocket(server_sock)
-    server.log.info("Server socket: %s" % sock)
-    q.put(sock)
-
-
-def _client_socket(client, server_ip, server_port, client_ip, client_port, q):
-    time.sleep(0.5)
-    sock = client.droid.openTcpSocket(server_ip, server_port, client_ip,
-                                      client_port)
-    client.log.info("Client socket: %s" % sock)
-    q.put(sock)
-
-
-def open_connect_socket(client, server, client_ip, server_ip, client_port,
-                        server_port, server_sock):
-    """ Open tcp socket and connect to server
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_ip : IP address on the client device
-      4. server_ip : IP address of the server to connect to
-      5. client_port : port on client socket
-      6. server_port : port on server socket
-      7. server_sock : server socket
-
-    Returns:
-      client and server socket from successful connect
-    """
-    sq = queue.Queue()
-    cq = queue.Queue()
-    s = threading.Thread(target=_accept_socket,
-                         args=(server, server_ip, server_port, server_sock,
-                               sq))
-    c = threading.Thread(target=_client_socket,
-                         args=(client, server_ip, server_port, client_ip,
-                               client_port, cq))
-    s.start()
-    c.start()
-    c.join()
-    s.join()
-
-    client_sock = cq.get()
-    server_sock = sq.get()
-    asserts.assert_true(client_sock and server_sock, "Failed to open sockets")
-
-    return client_sock, server_sock
-
-
-def open_server_socket(server, server_ip, server_port):
-    """ Open tcp server socket
-
-    Args:
-      1. server : ad object for server device
-      2. server_ip : IP address to listen on
-      3. server_port : port to listen on
-    """
-    sock = server.droid.openTcpServerSocket(server_ip, server_port)
-    server.log.info("Server Socket: %s" % sock)
-    asserts.assert_true(sock, "Failed to open server socket")
-    return sock
-
-
-def close_socket(ad, socket):
-    """ Close socket
-
-    Args:
-      1. ad - android device object
-      2. socket - socket key
-    """
-    status = ad.droid.closeTcpSocket(socket)
-    asserts.assert_true(status, "Failed to socket")
-
-
-def close_server_socket(ad, socket):
-    """ Close server socket
-
-    Args:
-      1. ad - android device object
-      2. socket - server socket key
-    """
-    status = ad.droid.closeTcpServerSocket(socket)
-    asserts.assert_true(status, "Failed to socket")
-
-
-def shutdown_socket(ad, socket):
-    """ Shutdown socket
-
-    Args:
-      1. ad - android device object
-      2. socket - socket key
-    """
-    fd = ad.droid.getFileDescriptorOfSocket(socket)
-    asserts.assert_true(fd, "Failed to get FileDescriptor key")
-    status = ad.droid.shutdownFileDescriptor(fd)
-    asserts.assert_true(status, "Failed to shutdown socket")
-
-
-def send_recv_data_sockets(client, server, client_sock, server_sock):
-    """ Send data over TCP socket from client to server.
-        Verify that server received the data
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_sock : client's socket handle
-      4. server_sock : server's socket handle
-    """
-    send_list = []
-    recv_list = []
-
-    for _ in range(1, PKTS + 1):
-        msg = MSG + " %s" % _
-        send_list.append(msg)
-        client.log.info("Sending message: %s" % msg)
-        client.droid.sendDataOverTcpSocket(client_sock, msg)
-        recv_msg = server.droid.recvDataOverTcpSocket(server_sock)
-        server.log.info("Received message: %s" % recv_msg)
-        recv_list.append(recv_msg)
-
-    recv_list = [x.rstrip('\x00') if x else x for x in recv_list]
-    asserts.assert_true(send_list and recv_list and send_list == recv_list,
-                        "Send and recv information is incorrect")
diff --git a/src/antlion/test_utils/net/ui_utils.py b/src/antlion/test_utils/net/ui_utils.py
deleted file mode 100644
index 03aa6c5..0000000
--- a/src/antlion/test_utils/net/ui_utils.py
+++ /dev/null
@@ -1,277 +0,0 @@
-"""Utils for adb-based UI operations."""
-
-import collections
-import logging
-import os
-import re
-import time
-
-from xml.dom import minidom
-from antlion.controllers.android_lib.errors import AndroidDeviceError
-
-
-class Point(collections.namedtuple('Point', ['x', 'y'])):
-
-  def __repr__(self):
-    return '{x},{y}'.format(x=self.x, y=self.y)
-
-
-class Bounds(collections.namedtuple('Bounds', ['start', 'end'])):
-
-  def __repr__(self):
-    return '[{start}][{end}]'.format(start=str(self.start), end=str(self.end))
-
-  def calculate_middle_point(self):
-    return Point((self.start.x + self.end.x) // 2,
-                 (self.start.y + self.end.y) // 2)
-
-
-def get_key_value_pair_strings(kv_pairs):
-  return ' '.join(['%s="%s"' % (k, v) for k, v in kv_pairs.items()])
-
-
-def parse_bound(bounds_string):
-  """Parse UI bound string.
-
-  Args:
-    bounds_string: string, In the format of the UI element bound.
-                   e.g '[0,0][1080,2160]'
-
-  Returns:
-    Bounds, The bound of UI element.
-  """
-  bounds_pattern = re.compile(r'\[(\d+),(\d+)\]\[(\d+),(\d+)\]')
-  points = bounds_pattern.match(bounds_string).groups()
-  points = list(map(int, points))
-  return Bounds(Point(*points[:2]), Point(*points[-2:]))
-
-
-def _find_point_in_bounds(bounds_string):
-  """Finds a point that resides within the given bounds.
-
-  Args:
-    bounds_string: string, In the format of the UI element bound.
-
-  Returns:
-    A tuple of integers, representing X and Y coordinates of a point within
-    the given boundary.
-  """
-  return parse_bound(bounds_string).calculate_middle_point()
-
-
-def get_screen_dump_xml(device):
-  """Gets an XML dump of the current device screen.
-
-  This only works when there is no instrumentation process running. A running
-  instrumentation process will disrupt calls for `adb shell uiautomator dump`.
-
-  Args:
-    device: AndroidDevice object.
-
-  Returns:
-    XML Document of the screen dump.
-  """
-  os.makedirs(device.log_path, exist_ok=True)
-  device.adb.shell('uiautomator dump')
-  device.adb.pull('/sdcard/window_dump.xml %s' % device.log_path)
-  return minidom.parse('%s/window_dump.xml' % device.log_path)
-
-
-def match_node(node, **matcher):
-  """Determine if a mode matches with the given matcher.
-
-  Args:
-    node: Is a XML node to be checked against matcher.
-    **matcher: Is a dict representing mobly AdbUiDevice matchers.
-
-  Returns:
-    True if all matchers match the given node.
-  """
-  match_list = []
-  for k, v in matcher.items():
-    if k == 'class_name':
-      key = k.replace('class_name', 'class')
-    elif k == 'text_contains':
-      key = k.replace('text_contains', 'text')
-    else:
-      key = k.replace('_', '-')
-    try:
-      if k == 'text_contains':
-        match_list.append(v in node.attributes[key].value)
-      else:
-        match_list.append(node.attributes[key].value == v)
-    except KeyError:
-      match_list.append(False)
-  return all(match_list)
-
-
-def _find_node(screen_dump_xml, **kwargs):
-  """Finds an XML node from an XML DOM.
-
-  Args:
-    screen_dump_xml: XML doc, parsed from adb ui automator dump.
-    **kwargs: key/value pairs to match in an XML node's attributes. Value of
-      each key has to be string type. Below lists keys which can be used:
-        index
-        text
-        text_contains (matching a part of text attribute)
-        resource_id
-        class_name (representing "class" attribute)
-        package
-        content_desc
-        checkable
-        checked
-        clickable
-        enabled
-        focusable
-        focused
-        scrollable
-        long_clickable
-        password
-        selected
-        A special key, matching_node, selects which match to return when more
-            than one node satisfies the other key/value pairs (a 1-based index,
-            defaulting to the first match).
-
-  Returns:
-    XML node of the UI element or None if not found.
-  """
-  nodes = screen_dump_xml.getElementsByTagName('node')
-  matching_node = kwargs.pop('matching_node', 1)
-  count = 1
-  for node in nodes:
-    if match_node(node, **kwargs):
-      if count == matching_node:
-        logging.debug('Found a node matching conditions: %s',
-                      get_key_value_pair_strings(kwargs))
-        return node
-      count += 1
-  return None
-
-
-def wait_and_get_xml_node(device, timeout, child=None, sibling=None, **kwargs):
-  """Waits for a node to appear and return it.
-
-  Args:
-    device: AndroidDevice object.
-    timeout: float, The number of seconds to wait for before giving up.
-    child: dict, a dict contains child XML node's attributes. It is extra set of
-      conditions to match an XML node that is under the XML node which is found
-      by **kwargs.
-    sibling: dict, a dict contains sibling XML node's attributes. It is extra
-      set of conditions to match an XML node that is under parent of the XML
-      node which is found by **kwargs.
-    **kwargs: Key/value pairs to match in an XML node's attributes.
-
-  Returns:
-    The XML node of the UI element.
-
-  Raises:
-    AndroidDeviceError: if the UI element does not appear on screen within
-    timeout or extra sets of conditions of child and sibling are used in a call.
-  """
-  if child and sibling:
-    raise AndroidDeviceError(
-        device, 'Only use one extra set of conditions: child or sibling.')
-  start_time = time.time()
-  threshold = start_time + timeout
-  while time.time() < threshold:
-    time.sleep(1)
-    screen_dump_xml = get_screen_dump_xml(device)
-    node = _find_node(screen_dump_xml, **kwargs)
-    if node and child:
-      node = _find_node(node, **child)
-    if node and sibling:
-      node = _find_node(node.parentNode, **sibling)
-    if node:
-      return node
-  msg = ('Timed out after %ds waiting for UI node matching conditions: %s.'
-         % (timeout, get_key_value_pair_strings(kwargs)))
-  if child:
-    msg = ('%s extra conditions: %s'
-           % (msg, get_key_value_pair_strings(child)))
-  if sibling:
-    msg = ('%s extra conditions: %s'
-           % (msg, get_key_value_pair_strings(sibling)))
-  raise AndroidDeviceError(device, msg)
-
-
-def has_element(device, **kwargs):
-  """Checks a UI element whether appears or not in the current screen.
-
-  Args:
-    device: AndroidDevice object.
-    **kwargs: Key/value pairs to match in an XML node's attributes.
-
-  Returns:
-    True if the UI element appears in the current screen else False.
-  """
-  timeout_sec = kwargs.pop('timeout', 30)
-  try:
-    wait_and_get_xml_node(device, timeout_sec, **kwargs)
-    return True
-  except AndroidDeviceError:
-    return False
-
-
-def get_element_attributes(device, **kwargs):
-  """Gets a UI element's all attributes.
-
-  Args:
-    device: AndroidDevice object.
-    **kwargs: Key/value pairs to match in an XML node's attributes.
-
-  Returns:
-    XML Node Attributes.
-  """
-  timeout_sec = kwargs.pop('timeout', 30)
-  node = wait_and_get_xml_node(device, timeout_sec, **kwargs)
-  return node.attributes
-
-
-def wait_and_click(device, duration_ms=None, **kwargs):
-  """Wait for a UI element to appear and click on it.
-
-  This function locates a UI element on the screen by matching attributes of
-  nodes in XML DOM, calculates a point's coordinates within the boundary of the
-  element, and clicks on the point marked by the coordinates.
-
-  Args:
-    device: AndroidDevice object.
-    duration_ms: int, The number of milliseconds to long-click.
-    **kwargs: A set of `key=value` parameters that identifies a UI element.
-  """
-  timeout_sec = kwargs.pop('timeout', 30)
-  button_node = wait_and_get_xml_node(device, timeout_sec, **kwargs)
-  x, y = _find_point_in_bounds(button_node.attributes['bounds'].value)
-  args = []
-  if duration_ms is None:
-    args = 'input tap %s %s' % (str(x), str(y))
-  else:
-    # Long click.
-    args = 'input swipe %s %s %s %s %s' % \
-        (str(x), str(y), str(x), str(y), str(duration_ms))
-  device.adb.shell(args)
-
-def wait_and_input_text(device, input_text, duration_ms=None, **kwargs):
-  """Wait for a UI element text field that can accept text entry.
-
-  This function locates a UI element using wait_and_click. Once the element is
-  clicked, the text is input into the text field.
-
-  Args:
-    device: AndroidDevice, Mobly's Android controller object.
-    input_text: Text string to be entered in to the text field.
-    duration_ms: duration in milliseconds.
-    **kwargs: A set of `key=value` parameters that identifies a UI element.
-  """
-  wait_and_click(device, duration_ms, **kwargs)
-  # Replace special characters.
-  # The command "input text <string>" requires special treatment for
-  # characters ' ' and '&'.  They need to be escaped. for example:
-  #    "hello world!!&" needs to transform to "hello\ world!!\&"
-  special_chars = ' &'
-  for c in special_chars:
-    input_text = input_text.replace(c, '\\%s' % c)
-  input_text = "'" + input_text + "'"
-  args = 'input text %s' % input_text
-  device.adb.shell(args)
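The removed ui_utils module centers on one piece of arithmetic: a uiautomator `bounds` attribute such as `[0,0][1080,2160]` is parsed into two corner points, and taps are sent to the midpoint. A self-contained sketch of that parsing follows; the values are made up for illustration.

```python
import re


def middle_of_bounds(bounds_string):
    """Return the midpoint of a uiautomator bounds string like '[0,0][1080,2160]'."""
    x1, y1, x2, y2 = map(
        int, re.match(r"\[(\d+),(\d+)\]\[(\d+),(\d+)\]", bounds_string).groups()
    )
    return (x1 + x2) // 2, (y1 + y2) // 2


# The point wait_and_click would tap for a full-screen 1080x2160 node.
assert middle_of_bounds("[0,0][1080,2160]") == (540, 1080)
```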
diff --git a/src/antlion/test_utils/users/__init__.py b/src/antlion/test_utils/users/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/users/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/users/users.py b/src/antlion/test_utils/users/users.py
deleted file mode 100644
index 07df00d..0000000
--- a/src/antlion/test_utils/users/users.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# Defines utilities that can be used to create android user account
-
-import re
-import time
-import logging as log
-
-
-
-def get_all_users(android_device):
-    all_users = {}
-    out = android_device.adb.shell("pm list users")
-
-    for user in re.findall("UserInfo{(.*\d*\w):", out):
-        all = user.split(":")
-        all_users[all[1]] = all_users.get(all[1], all[0])
-    return all_users
-
-
-def create_new_user(android_device, user_name):
-    out = android_device.adb.shell("pm create-user {}".format(user_name))
-    return re.search("Success(.* (.*\d))", out).group(2)
-
-
-def switch_user(android_device, user_id):
-    prev_user = get_current_user(android_device)
-    android_device.adb.shell("am switch-user {}".format(user_id))
-    if not _wait_for_user_to_take_place(android_device, prev_user):
-        log.error("Failed to successfully switch user {}".format(user_id))
-        return False
-    return True
-
-
-def remove_user(android_device, user_id):
-    return "Success" in android_device.adb.shell("pm remove-user {}".format(user_id))
-
-
-def get_current_user(android_device):
-    out = android_device.adb.shell("dumpsys activity")
-    result = re.search("mCurrentUserId:(\d+)", out)
-    return result.group(1)
-
-
-def _wait_for_user_to_take_place(android_device, user_id, timeout=10):
-    start_time = time.time()
-    while (start_time + timeout) > time.time():
-        time.sleep(1)
-        if user_id != get_current_user(android_device):
-            return True
-    return False
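A sketch of the lifecycle the removed users helpers covered: create a secondary user, switch to it, and clean up. `ad` is assumed to be an AndroidDevice, the user name is arbitrary, and user id 0 is assumed to be the primary user.

```python
from antlion.test_utils.users import users


def exercise_secondary_user(ad):
    """Create, switch to, and remove a secondary Android user (illustrative)."""
    user_id = users.create_new_user(ad, "antlion_guest")  # returns the new user id
    try:
        if not users.switch_user(ad, user_id):  # polls until the switch takes effect
            ad.log.error("Could not switch to user %s", user_id)
    finally:
        users.switch_user(ad, 0)  # assumption: 0 is the primary user
        users.remove_user(ad, user_id)
```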
diff --git a/src/antlion/test_utils/wifi/RttPostFlightTest.py b/src/antlion/test_utils/wifi/RttPostFlightTest.py
deleted file mode 100644
index 16d85e2..0000000
--- a/src/antlion/test_utils/wifi/RttPostFlightTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-import antlion.test_utils.wifi.rpm_controller_utils as rutils
-import antlion.test_utils.wifi.wifi_test_utils as wutils
-from antlion import asserts
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-SSID = "DO_NOT_CONNECT"
-TIMEOUT = 60
-WAIT_TIME = 10
-
-class RttPostFlightTest(WifiBaseTest):
-    """Turns off 802.11mc AP after RTT tests."""
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = self.android_devices[0]
-        required_params = ["rpm_ip", "rpm_port"]
-        self.unpack_userparams(req_param_names=required_params)
-        self.rpm_telnet = rutils.create_telnet_session(self.rpm_ip)
-
-    ### Tests ###
-
-    def test_turn_off_80211mc_ap(self):
-        self.rpm_telnet.turn_off(self.rpm_port)
-        curr_time = time.time()
-        while time.time() < curr_time + TIMEOUT:
-            time.sleep(WAIT_TIME)
-            if not wutils.start_wifi_connection_scan_and_check_for_network(
-                self.dut, SSID):
-                return True
-        self.log.error("Failed to turn off AP")
-        return False
diff --git a/src/antlion/test_utils/wifi/RttPreFlightTest.py b/src/antlion/test_utils/wifi/RttPreFlightTest.py
deleted file mode 100644
index 0ab6d99..0000000
--- a/src/antlion/test_utils/wifi/RttPreFlightTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-import antlion.test_utils.wifi.rpm_controller_utils as rutils
-import antlion.test_utils.wifi.wifi_test_utils as wutils
-from antlion import asserts
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-SSID = "DO_NOT_CONNECT"
-TIMEOUT = 60
-WAIT_TIME = 10
-
-class RttPreFlightTest(WifiBaseTest):
-    """Turns on/off 802.11mc AP before and after RTT tests."""
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = self.android_devices[0]
-        required_params = ["rpm_ip", "rpm_port"]
-        self.unpack_userparams(req_param_names=required_params)
-        self.rpm_telnet = rutils.create_telnet_session(self.rpm_ip)
-
-    ### Tests ###
-
-    def test_turn_on_80211mc_ap(self):
-        self.rpm_telnet.turn_on(self.rpm_port)
-        curr_time = time.time()
-        while time.time() < curr_time + TIMEOUT:
-            time.sleep(WAIT_TIME)
-            if wutils.start_wifi_connection_scan_and_check_for_network(
-                self.dut, SSID):
-                return True
-        self.log.error("Failed to turn on AP")
-        return False
diff --git a/src/antlion/test_utils/wifi/aware/AwareBaseTest.py b/src/antlion/test_utils/wifi/aware/AwareBaseTest.py
deleted file mode 100644
index 45b222d..0000000
--- a/src/antlion/test_utils/wifi/aware/AwareBaseTest.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from antlion import asserts
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.aware import aware_const as aconsts
-from antlion.test_utils.wifi.aware import aware_test_utils as autils
-
-
-class AwareBaseTest(BaseTestClass):
-    # message ID counter to make sure all uses are unique
-    msg_id = 0
-
-    # offset (in seconds) to separate the start-up of multiple devices.
-    # De-synchronizes the start-up time so that they don't start and stop scanning
-    # at the same time - which can lead to very long clustering times.
-    device_startup_offset = 2
-
-    def setup_class(self):
-        opt_param = ["pixel_models", "cnss_diag_file", "ranging_role_concurrency_flexible_models"]
-        self.unpack_userparams(opt_param_names=opt_param)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-
-    def setup_test(self):
-        required_params = ("aware_default_power_mode",
-                           "dbs_supported_models",)
-        self.unpack_userparams(required_params)
-
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-
-        for ad in self.android_devices:
-            ad.droid.wifiEnableVerboseLogging(1)
-            asserts.skip_if(
-                not ad.droid.doesDeviceSupportWifiAwareFeature(),
-                "Device under test does not support Wi-Fi Aware - skipping test"
-            )
-            aware_avail = ad.droid.wifiIsAwareAvailable()
-            ad.droid.wifiP2pClose()
-            wutils.wifi_toggle_state(ad, True)
-            utils.set_location_service(ad, True)
-            if not aware_avail:
-                self.log.info('Aware not available. Waiting ...')
-                autils.wait_for_event(ad,
-                                      aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
-            ad.aware_capabilities = autils.get_aware_capabilities(ad)
-            self.reset_device_parameters(ad)
-            self.reset_device_statistics(ad)
-            self.set_power_mode_parameters(ad)
-            wutils.set_wifi_country_code(ad, wutils.WifiEnums.CountryCode.US)
-            autils.configure_ndp_allow_any_override(ad, True)
-            # set randomization interval to 0 (disable) to reduce likelihood of
-            # interference in tests
-            autils.configure_mac_random_interval(ad, 0)
-            ad.ed.clear_all_events()
-
-    def teardown_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(
-                    proc[0], proc[1], self.test_name, pull_dump=False)
-        self.tcpdump_proc = []
-        for ad in self.android_devices:
-            if not ad.droid.doesDeviceSupportWifiAwareFeature():
-                return
-            ad.droid.wifiP2pClose()
-            ad.droid.wifiAwareDestroyAll()
-            self.reset_device_parameters(ad)
-            autils.validate_forbidden_callbacks(ad)
-
-    def reset_device_parameters(self, ad):
-        """Reset device configurations which may have been set by tests. Should be
-    done before tests start (in case previous one was killed without tearing
-    down) and after they end (to leave device in usable state).
-
-    Args:
-      ad: device to be reset
-    """
-        ad.adb.shell("cmd wifiaware reset")
-
-    def reset_device_statistics(self, ad):
-        """Reset device statistics.
-
-    Args:
-        ad: device to be reset
-    """
-        ad.adb.shell("cmd wifiaware native_cb get_cb_count --reset")
-
-    def set_power_mode_parameters(self, ad):
-        """Set the power configuration DW parameters for the device based on any
-    configuration overrides (if provided)"""
-        if self.aware_default_power_mode == "INTERACTIVE":
-            autils.config_settings_high_power(ad)
-        elif self.aware_default_power_mode == "NON_INTERACTIVE":
-            autils.config_settings_low_power(ad)
-        else:
-            asserts.assert_false(
-                "The 'aware_default_power_mode' configuration must be INTERACTIVE or "
-                "NON_INTERACTIVE")
-
-    def get_next_msg_id(self):
-        """Increment the message ID and returns the new value. Guarantees that
-    each call to the method returns a unique value.
-
-    Returns: a new message id value.
-    """
-        self.msg_id = self.msg_id + 1
-        return self.msg_id
-
-    def on_fail(self, test_name, begin_time):
-        for ad in self.android_devices:
-            ad.take_bug_report(test_name, begin_time)
-            ad.cat_adb_log(test_name, begin_time)
-            wutils.get_ssrdumps(ad)
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for ad in self.android_devices:
-                wutils.get_cnss_diag_log(ad)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-        self.tcpdump_proc = []
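For context on what is being removed, here is the rough shape of a test that built on AwareBaseTest: the base class handles Wi-Fi and Aware setup/teardown, and the test body drives the SL4A facade and waits on events defined in aware_const. The attach/destroy facade calls in the body are an assumption based on the callback names used by these utilities, not something shown in this patch.

```python
from antlion.test_utils.wifi.aware import aware_const as aconsts
from antlion.test_utils.wifi.aware import aware_test_utils as autils
from antlion.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest


class ExampleAwareTest(AwareBaseTest):
    """Illustrative subclass; setup and teardown come from AwareBaseTest."""

    def test_attach(self):
        dut = self.android_devices[0]
        # Assumed SL4A facade calls; the event name comes from aware_const.
        session_id = dut.droid.wifiAwareAttach()
        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
        dut.droid.wifiAwareDestroy(session_id)
```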
diff --git a/src/antlion/test_utils/wifi/aware/__init__.py b/src/antlion/test_utils/wifi/aware/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/wifi/aware/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/wifi/aware/aware_const.py b/src/antlion/test_utils/wifi/aware/aware_const.py
deleted file mode 100644
index a720196..0000000
--- a/src/antlion/test_utils/wifi/aware/aware_const.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# Aware power settings values for interactive (high power) and
-# non-interactive (low power) modes
-######################################################
-
-POWER_DW_24_INTERACTIVE = 1
-POWER_DW_5_INTERACTIVE = 1
-POWER_DISC_BEACON_INTERVAL_INTERACTIVE = 0
-POWER_NUM_SS_IN_DISC_INTERACTIVE = 0
-POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE = 0
-
-POWER_DW_24_NON_INTERACTIVE = 4
-POWER_DW_5_NON_INTERACTIVE = 0
-POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE = 0
-POWER_NUM_SS_IN_DISC_NON_INTERACTIVE = 0
-POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE = 0
-
-######################################################
-# Broadcast events
-######################################################
-BROADCAST_WIFI_AWARE_AVAILABLE = "WifiAwareAvailable"
-BROADCAST_WIFI_AWARE_NOT_AVAILABLE = "WifiAwareNotAvailable"
-
-######################################################
-# ConfigRequest keys
-######################################################
-
-CONFIG_KEY_5G_BAND = "Support5gBand"
-CONFIG_KEY_MASTER_PREF = "MasterPreference"
-CONFIG_KEY_CLUSTER_LOW = "ClusterLow"
-CONFIG_KEY_CLUSTER_HIGH = "ClusterHigh"
-CONFIG_KEY_ENABLE_IDEN_CB = "EnableIdentityChangeCallback"
-
-######################################################
-# Publish & Subscribe Config keys
-######################################################
-
-DISCOVERY_KEY_SERVICE_NAME = "ServiceName"
-DISCOVERY_KEY_SSI = "ServiceSpecificInfo"
-DISCOVERY_KEY_MATCH_FILTER = "MatchFilter"
-DISCOVERY_KEY_MATCH_FILTER_LIST = "MatchFilterList"
-DISCOVERY_KEY_DISCOVERY_TYPE = "DiscoveryType"
-DISCOVERY_KEY_TTL = "TtlSec"
-DISCOVERY_KEY_TERM_CB_ENABLED = "TerminateNotificationEnabled"
-DISCOVERY_KEY_RANGING_ENABLED = "RangingEnabled"
-DISCOVERY_KEY_MIN_DISTANCE_MM = "MinDistanceMm"
-DISCOVERY_KEY_MAX_DISTANCE_MM = "MaxDistanceMm"
-
-PUBLISH_TYPE_UNSOLICITED = 0
-PUBLISH_TYPE_SOLICITED = 1
-
-SUBSCRIBE_TYPE_PASSIVE = 0
-SUBSCRIBE_TYPE_ACTIVE = 1
-
-######################################################
-# WifiAwareAttachCallback events
-######################################################
-EVENT_CB_ON_ATTACHED = "WifiAwareOnAttached"
-EVENT_CB_ON_ATTACH_FAILED = "WifiAwareOnAttachFailed"
-
-######################################################
-# WifiAwareIdentityChangedListener events
-######################################################
-EVENT_CB_ON_IDENTITY_CHANGED = "WifiAwareOnIdentityChanged"
-
-# WifiAwareAttachCallback & WifiAwareIdentityChangedListener events keys
-EVENT_CB_KEY_REASON = "reason"
-EVENT_CB_KEY_MAC = "mac"
-EVENT_CB_KEY_LATENCY_MS = "latencyMs"
-EVENT_CB_KEY_TIMESTAMP_MS = "timestampMs"
-
-######################################################
-# WifiAwareDiscoverySessionCallback events
-######################################################
-SESSION_CB_ON_PUBLISH_STARTED = "WifiAwareSessionOnPublishStarted"
-SESSION_CB_ON_SUBSCRIBE_STARTED = "WifiAwareSessionOnSubscribeStarted"
-SESSION_CB_ON_SESSION_CONFIG_UPDATED = "WifiAwareSessionOnSessionConfigUpdated"
-SESSION_CB_ON_SESSION_CONFIG_FAILED = "WifiAwareSessionOnSessionConfigFailed"
-SESSION_CB_ON_SESSION_TERMINATED = "WifiAwareSessionOnSessionTerminated"
-SESSION_CB_ON_SERVICE_DISCOVERED = "WifiAwareSessionOnServiceDiscovered"
-SESSION_CB_ON_MESSAGE_SENT = "WifiAwareSessionOnMessageSent"
-SESSION_CB_ON_MESSAGE_SEND_FAILED = "WifiAwareSessionOnMessageSendFailed"
-SESSION_CB_ON_MESSAGE_RECEIVED = "WifiAwareSessionOnMessageReceived"
-SESSION_CB_ON_SERVICE_LOST = "WifiAwareSessionOnServiceLost"
-
-# WifiAwareDiscoverySessionCallback events keys
-SESSION_CB_KEY_CB_ID = "callbackId"
-SESSION_CB_KEY_SESSION_ID = "discoverySessionId"
-SESSION_CB_KEY_REASON = "reason"
-SESSION_CB_KEY_PEER_ID = "peerId"
-SESSION_CB_KEY_SERVICE_SPECIFIC_INFO = "serviceSpecificInfo"
-SESSION_CB_KEY_MATCH_FILTER = "matchFilter"
-SESSION_CB_KEY_MATCH_FILTER_LIST = "matchFilterList"
-SESSION_CB_KEY_MESSAGE = "message"
-SESSION_CB_KEY_MESSAGE_ID = "messageId"
-SESSION_CB_KEY_MESSAGE_AS_STRING = "messageAsString"
-SESSION_CB_KEY_LATENCY_MS = "latencyMs"
-SESSION_CB_KEY_TIMESTAMP_MS = "timestampMs"
-SESSION_CB_KEY_DISTANCE_MM = "distanceMm"
-SESSION_CB_KEY_LOST_REASON = "lostReason"
-
-# WifiAwareDiscoverySessionCallback onServiceLost reason code
-REASON_PEER_NOT_VISIBLE = 1
-
-######################################################
-# WifiAwareRangingListener events (RttManager.RttListener)
-######################################################
-RTT_LISTENER_CB_ON_SUCCESS = "WifiAwareRangingListenerOnSuccess"
-RTT_LISTENER_CB_ON_FAILURE = "WifiAwareRangingListenerOnFailure"
-RTT_LISTENER_CB_ON_ABORT = "WifiAwareRangingListenerOnAborted"
-
-# WifiAwareRangingListener events (RttManager.RttListener) keys
-RTT_LISTENER_CB_KEY_CB_ID = "callbackId"
-RTT_LISTENER_CB_KEY_SESSION_ID = "sessionId"
-RTT_LISTENER_CB_KEY_RESULTS = "Results"
-RTT_LISTENER_CB_KEY_REASON = "reason"
-RTT_LISTENER_CB_KEY_DESCRIPTION = "description"
-
-######################################################
-# Capabilities keys
-######################################################
-
-CAP_MAX_CONCURRENT_AWARE_CLUSTERS = "maxConcurrentAwareClusters"
-CAP_MAX_PUBLISHES = "maxPublishes"
-CAP_MAX_SUBSCRIBES = "maxSubscribes"
-CAP_MAX_SERVICE_NAME_LEN = "maxServiceNameLen"
-CAP_MAX_MATCH_FILTER_LEN = "maxMatchFilterLen"
-CAP_MAX_TOTAL_MATCH_FILTER_LEN = "maxTotalMatchFilterLen"
-CAP_MAX_SERVICE_SPECIFIC_INFO_LEN = "maxServiceSpecificInfoLen"
-CAP_MAX_EXTENDED_SERVICE_SPECIFIC_INFO_LEN = "maxExtendedServiceSpecificInfoLen"
-CAP_MAX_NDI_INTERFACES = "maxNdiInterfaces"
-CAP_MAX_NDP_SESSIONS = "maxNdpSessions"
-CAP_MAX_APP_INFO_LEN = "maxAppInfoLen"
-CAP_MAX_QUEUED_TRANSMIT_MESSAGES = "maxQueuedTransmitMessages"
-CAP_MAX_SUBSCRIBE_INTERFACE_ADDRESSES = "maxSubscribeInterfaceAddresses"
-CAP_SUPPORTED_CIPHER_SUITES = "supportedCipherSuites"
-
-######################################################
-# WifiAwareNetworkCapabilities keys
-######################################################
-
-NET_CAP_IPV6 = "aware_ipv6"
-NET_CAP_PORT = "aware_port"
-NET_CAP_TRANSPORT_PROTOCOL = "aware_transport_protocol"
-
-######################################################
-
-# Aware NDI (NAN data-interface) name prefix
-AWARE_NDI_PREFIX = "aware_data"
-
-# Aware discovery channels
-AWARE_DISCOVERY_CHANNEL_24_BAND = 6
-AWARE_DISCOVERY_CHANNEL_5_BAND = 149
-
-# Aware Data-Path Constants
-DATA_PATH_INITIATOR = 0
-DATA_PATH_RESPONDER = 1
-
-# Maximum send retry
-MAX_TX_RETRIES = 5
-
-# Callback keys (for 'adb shell cmd wifiaware native_cb get_cb_count')
-CB_EV_CLUSTER = "0"
-CB_EV_DISABLED = "1"
-CB_EV_PUBLISH_TERMINATED = "2"
-CB_EV_SUBSCRIBE_TERMINATED = "3"
-CB_EV_MATCH = "4"
-CB_EV_MATCH_EXPIRED = "5"
-CB_EV_FOLLOWUP_RECEIVED = "6"
-CB_EV_TRANSMIT_FOLLOWUP = "7"
-CB_EV_DATA_PATH_REQUEST = "8"
-CB_EV_DATA_PATH_CONFIRM = "9"
-CB_EV_DATA_PATH_TERMINATED = "10"
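The interactive and non-interactive discovery-window (DW) constants near the top of the removed aware_const.py pair up as two named power profiles, which AwareBaseTest.set_power_mode_parameters selected between via aware_test_utils. The dict below is only an illustrative regrouping of those constants, not an API from the module.

```python
from antlion.test_utils.wifi.aware import aware_const as aconsts

# Illustrative regrouping of the removed DW power constants.
POWER_PROFILES = {
    "INTERACTIVE": {
        "dw_24ghz": aconsts.POWER_DW_24_INTERACTIVE,
        "dw_5ghz": aconsts.POWER_DW_5_INTERACTIVE,
        "disc_beacon_interval": aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE,
    },
    "NON_INTERACTIVE": {
        "dw_24ghz": aconsts.POWER_DW_24_NON_INTERACTIVE,
        "dw_5ghz": aconsts.POWER_DW_5_NON_INTERACTIVE,
        "disc_beacon_interval": aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE,
    },
}
```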
diff --git a/src/antlion/test_utils/wifi/aware/aware_test_utils.py b/src/antlion/test_utils/wifi/aware/aware_test_utils.py
deleted file mode 100644
index 6c54791..0000000
--- a/src/antlion/test_utils/wifi/aware/aware_test_utils.py
+++ /dev/null
@@ -1,1055 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import json
-import queue
-import re
-import statistics
-import time
-from antlion import asserts
-
-from antlion.test_utils.net import connectivity_const as cconsts
-from antlion.test_utils.net import socket_test_utils as sutils
-from antlion.test_utils.wifi.aware import aware_const as aconsts
-
-# arbitrary timeout for events
-EVENT_TIMEOUT = 10
-
-# semi-arbitrary timeout for network formation events. Based on framework
-# timeout for NDP (NAN data-path) negotiation to be completed.
-EVENT_NDP_TIMEOUT = 20
-
-# number of second to 'reasonably' wait to make sure that devices synchronize
-# with each other - useful for OOB test cases, where the OOB discovery would
-# take some time
-WAIT_FOR_CLUSTER = 5
-
-
-def decorate_event(event_name, id):
-    return '%s_%d' % (event_name, id)
-
-
-def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for the specified event or timeout.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-  Returns:
-    The event (if available)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-        return event
-    except queue.Empty:
-        ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
-        asserts.fail(event_name)
-
-def _filter_callbacks(event, expected_kv):
-    """
-    Helper method to use in |fail_on_event_with_keys| and
-    |wait_for_event_with_keys|
-    """
-    for expected_k, expected_v in expected_kv:
-        actual_v = event['data'][expected_k]
-        if isinstance(expected_v, dict) and isinstance(actual_v, dict):
-            # |expected_v| not a subset of |actual_v|
-            if not(expected_v.items() <= actual_v.items()):
-                return False
-        else:
-            if actual_v != expected_v:
-                return False
-    return True
-
-
-def wait_for_event_with_keys(ad,
-                             event_name,
-                             timeout=EVENT_TIMEOUT,
-                             *keyvalues):
-    """Wait for the specified event contain the key/value pairs or timeout
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-    keyvalues: Expected (key, value) pairs. If the value for a key is a dict,
-               then this will perform subset matching for that key.
-  Returns:
-    The event (if available)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.wait_for_event(event_name, _filter_callbacks, timeout,
-                                     keyvalues)
-        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-        return event
-    except queue.Empty:
-        ad.log.info('%sTimed out while waiting for %s (%s)', prefix,
-                    event_name, keyvalues)
-        asserts.fail(event_name)
-
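The dict-valued subset matching done by _filter_callbacks is easy to miss when reading the helpers above. A minimal, self-contained sketch of the matching rule (the event payload below is made up for illustration):

    event = {"data": {"id": 42,
                      "networkCapabilities": {"transport": 5, "metered": False}}}
    expected_kv = (
        ("id", 42),                                 # scalars must match exactly
        ("networkCapabilities", {"transport": 5}),  # dicts are matched as subsets
    )
    matched = all(
        expected_v.items() <= event["data"][k].items()
        if isinstance(expected_v, dict)
        else event["data"][k] == expected_v
        for k, expected_v in expected_kv
    )
    assert matched  # wait_for_event_with_keys would accept this event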
-
-def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for a timeout period and looks for the specified event - fails if it
-  is observed.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait for (and fail on its appearance)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
-                    event['data'])
-        asserts.fail(event_name, extras=event)
-    except queue.Empty:
-        ad.log.info('%s%s not seen (as expected)', prefix, event_name)
-        return
-
-
-def fail_on_event_with_keys(ad, event_name, timeout=EVENT_TIMEOUT, *keyvalues):
-    """Wait for a timeout period and looks for the specified event which contains
-  the key/value pairs - fails if it is observed.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-    keyvalues: Expected (key, value) pairs. If the value for a key is a dict,
-               then this will perform subset matching for that key.
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.wait_for_event(event_name, _filter_callbacks, timeout,
-                                     keyvalues)
-        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
-                    event['data'])
-        asserts.fail(event_name, extras=event)
-    except queue.Empty:
-        ad.log.info('%s%s (%s) not seen (as expected)', prefix, event_name,
-                    keyvalues)
-        return
-
-
-def verify_no_more_events(ad, timeout=EVENT_TIMEOUT):
-    """Verify that there are no more events in the queue.
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    should_fail = False
-    try:
-        while True:
-            event = ad.ed.pop_events('.*', timeout, freq=0)
-            ad.log.info('%sQueue contains %s', prefix, event)
-            should_fail = True
-    except queue.Empty:
-        if should_fail:
-            asserts.fail('%sEvent queue not empty' % prefix)
-        ad.log.info('%sNo events in the queue (as expected)', prefix)
-        return
-
-
-def encode_list(list_of_objects):
-    """Converts the list of strings or bytearrays to a list of b64 encoded
-  bytearrays.
-
-  A None object is treated as a zero-length bytearray.
-
-  Args:
-    list_of_objects: A list of strings or bytearray objects
-  Returns: A list of base64-encoded UTF-8 strings, one per input object.
-  """
-    encoded_list = []
-    for obj in list_of_objects:
-        if obj is None:
-            obj = bytes()
-        if isinstance(obj, str):
-            encoded_list.append(
-                base64.b64encode(bytes(obj, 'utf-8')).decode('utf-8'))
-        else:
-            encoded_list.append(base64.b64encode(obj).decode('utf-8'))
-    return encoded_list
-
-
-def decode_list(list_of_b64_strings):
-    """Converts the list of b64 encoded strings to a list of bytearray.
-
-  Args:
-    list_of_b64_strings: list of strings, each of which is b64 encoded array
-  Returns: a list of bytearrays.
-  """
-    decoded_list = []
-    for b64_str in list_of_b64_strings:
-        decoded_list.append(base64.b64decode(b64_str))
-    return decoded_list
-
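A quick self-contained round trip through the two codecs above (the sample values are arbitrary):

    import base64
    encoded = [base64.b64encode(o if isinstance(o, bytes) else bytes(o, "utf-8")).decode("utf-8")
               for o in ["ssi", bytes([1, 2, 3])]]
    assert [base64.b64decode(e) for e in encoded] == [b"ssi", b"\x01\x02\x03"]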
-
-def construct_max_match_filter(max_size):
-    """Constructs a maximum size match filter that fits into the 'max_size' bytes.
-
-  Match filters are a set of LVs (Length, Value pairs) where L is 1 byte. The
-  maximum size match filter will contain max_size/2 LVs with all Vs (except
-  possibly the last one) of 1 byte, the last V may be 2 bytes for odd max_size.
-
-  Args:
-    max_size: Maximum size of the match filter.
-  Returns: an array of bytearrays.
-  """
-    mf_list = []
-    num_lvs = max_size // 2
-    for i in range(num_lvs - 1):
-        mf_list.append(bytes([i]))
-    if (max_size % 2 == 0):
-        mf_list.append(bytes([255]))
-    else:
-        mf_list.append(bytes([254, 255]))
-    return mf_list
-
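A worked example of the LV layout produced above may help: for an odd max_size of 5 the filter is two LVs, with the final value taking 2 bytes.

    mf = [bytes([0]), bytes([254, 255])]     # what construct_max_match_filter(5) returns
    on_air = sum(1 + len(v) for v in mf)     # one length byte per LV plus its value bytes
    assert on_air == 5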
-
-def assert_equal_strings(first, second, msg=None, extras=None):
-    """Assert equality of the string operands - where None is treated as equal to
-  an empty string (''), otherwise fail the test.
-
-  Error message is "first != second" by default. Additional explanation can
-  be supplied in the message.
-
-  Args:
-      first, second: The strings that are evaluated for equality.
-      msg: A string that adds additional info about the failure.
-      extras: An optional field for extra information to be included in
-              test result.
-  """
-    if first is None:
-        first = ''
-    if second is None:
-        second = ''
-    asserts.assert_equal(first, second, msg, extras)
-
-
-def get_aware_capabilities(ad):
-    """Get the Wi-Fi Aware capabilities from the specified device. The
-  capabilities are a dictionary keyed by aware_const.CAP_* keys.
-
-  Args:
-    ad: the Android device
-  Returns: the capability dictionary.
-  """
-    return json.loads(ad.adb.shell('cmd wifiaware state_mgr get_capabilities'))
-
-
-def get_wifi_mac_address(ad):
-    """Get the Wi-Fi interface MAC address as a upper-case string of hex digits
-  without any separators (e.g. ':').
-
-  Args:
-    ad: Device on which to run.
-  """
-    return ad.droid.wifiGetConnectionInfo()['mac_address'].upper().replace(
-        ':', '')
-
-
-def validate_forbidden_callbacks(ad, limited_cb=None):
-    """Validate that the specified callbacks have not been called more then permitted.
-
-  In addition to the input configuration also validates that forbidden callbacks
-  have never been called.
-
-  Args:
-    ad: Device on which to run.
-    limited_cb: Dictionary of CB_EV_* ids and maximum permitted calls (0
-                meaning never).
-  """
-    cb_data = json.loads(ad.adb.shell('cmd wifiaware native_cb get_cb_count'))
-
-    if limited_cb is None:
-        limited_cb = {}
-
-    fail = False
-    for cb_event in limited_cb.keys():
-        if cb_event in cb_data:
-            if cb_data[cb_event] > limited_cb[cb_event]:
-                fail = True
-                ad.log.info(
-                    'Callback %s observed %d times: more than permitted %d times',
-                    cb_event, cb_data[cb_event], limited_cb[cb_event])
-
-    asserts.assert_false(fail, 'Forbidden callbacks observed', extras=cb_data)
-
-
-def extract_stats(ad, data, results, key_prefix, log_prefix):
-    """Extract statistics from the data, store in the results dictionary, and
-  output to the info log.
-
-  Args:
-    ad: Android device (for logging)
-    data: A list containing the data to be analyzed.
-    results: A dictionary into which to place the statistics.
-    key_prefix: A string prefix to use for the dict keys storing the
-                extracted stats.
-    log_prefix: A string prefix to use for the info log.
-  """
-    num_samples = len(data)
-    results['%snum_samples' % key_prefix] = num_samples
-
-    if not data:
-        return
-
-    data_min = min(data)
-    data_max = max(data)
-    data_mean = statistics.mean(data)
-    data_cdf = extract_cdf(data)
-    data_cdf_decile = extract_cdf_decile(data_cdf)
-
-    results['%smin' % key_prefix] = data_min
-    results['%smax' % key_prefix] = data_max
-    results['%smean' % key_prefix] = data_mean
-    results['%scdf' % key_prefix] = data_cdf
-    results['%scdf_decile' % key_prefix] = data_cdf_decile
-    results['%sraw_data' % key_prefix] = data
-
-    if num_samples > 1:
-        data_stdev = statistics.stdev(data)
-        results['%sstdev' % key_prefix] = data_stdev
-        ad.log.info(
-            '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, stdev=%.2f, cdf_decile=%s',
-            log_prefix, num_samples, data_min, data_max, data_mean, data_stdev,
-            data_cdf_decile)
-    else:
-        ad.log.info(
-            '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, cdf_decile=%s',
-            log_prefix, num_samples, data_min, data_max, data_mean,
-            data_cdf_decile)
-
-
-def extract_cdf_decile(cdf):
-    """Extracts the 10%, 20%, ..., 90% points from the CDF and returns their
-  value (a list of 9 values).
-
-  Since the CDF may not have an exact x% point, this picks the first value >= x%.
-
-  Args:
-    cdf: a list of 2 lists, the X and Y of the CDF.
-  """
-    decades = []
-    next_decade = 10
-    for x, y in zip(cdf[0], cdf[1]):
-        while 100 * y >= next_decade:
-            decades.append(x)
-            next_decade = next_decade + 10
-        if next_decade == 100:
-            break
-    return decades
-
-
-def extract_cdf(data):
-    """Calculates the Cumulative Distribution Function (CDF) of the data.
-
-  Args:
-      data: A list containing data (does not have to be sorted).
-
-  Returns: a list of 2 lists: the X and Y axis of the CDF.
-  """
-    x = []
-    cdf = []
-    if not data:
-        return (x, cdf)
-
-    all_values = sorted(data)
-    for val in all_values:
-        if not x:
-            x.append(val)
-            cdf.append(1)
-        else:
-            if x[-1] == val:
-                cdf[-1] += 1
-            else:
-                x.append(val)
-                cdf.append(cdf[-1] + 1)
-
-    scale = 1.0 / len(all_values)
-    for i in range(len(cdf)):
-        cdf[i] = cdf[i] * scale
-
-    return (x, cdf)
-
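As a quick sanity check of the two helpers above, ten distinct samples produce a CDF of 0.1, 0.2, ..., 1.0 and decile points 1 through 9 (the sample data is arbitrary):

    data = list(range(1, 11))                      # [1, 2, ..., 10]
    xs = sorted(set(data))
    cdf = [sum(d <= x for d in data) / len(data) for x in xs]
    assert cdf == [i / 10 for i in range(1, 11)]   # 0.1, 0.2, ..., 1.0
    # extract_cdf(data) returns (xs, cdf); extract_cdf_decile((xs, cdf)) then
    # yields [1, 2, 3, 4, 5, 6, 7, 8, 9] - the first x at or above each decile.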
-
-def get_mac_addr(device, interface):
-    """Get the MAC address of the specified interface. Uses ifconfig and parses
-  its output. Normalizes string to remove ':' and upper case.
-
-  Args:
-    device: Device on which to query the interface MAC address.
-    interface: Name of the interface for which to obtain the MAC address.
-  """
-    out = device.adb.shell("ifconfig %s" % interface)
-    res = re.match(".* HWaddr (\S+).*", out, re.S)
-    asserts.assert_true(res,
-                        'Unable to obtain MAC address for interface %s' %
-                        interface,
-                        extras=out)
-    return res.group(1).upper().replace(':', '')
-
-
-def get_ipv6_addr(device, interface):
-    """Get the IPv6 address of the specified interface. Uses ifconfig and parses
-  its output. Returns None if the interface does not have an IPv6 address
-  (indicating it is not UP).
-
-  Args:
-    device: Device on which to query the interface IPv6 address.
-    interface: Name of the interface for which to obtain the IPv6 address.
-  """
-    out = device.adb.shell("ifconfig %s" % interface)
-    res = re.match(".*inet6 addr: (\S+)/.*", out, re.S)
-    if not res:
-        return None
-    return res.group(1)
-
-
-def verify_socket_connect(dut_s, dut_c, ipv6_s, ipv6_c, port):
-    """Verify the socket connection between server (dut_s) and client (dut_c)
-    using the given IPv6 addresses.
-
-    Opens a ServerSocket on the server and tries to connect to it
-    from the client.
-
-    Args:
-        dut_s, dut_c: the server and client devices under test (DUTs)
-        ipv6_s, ipv6_c: the scoped link-local addresses of the server and client.
-        port: the port to use
-    Return: True on success, False otherwise
-    """
-    server_sock = None
-    sock_c = None
-    sock_s = None
-    try:
-        server_sock = sutils.open_server_socket(dut_s, ipv6_s, port)
-        port_to_use = port
-        if port == 0:
-            port_to_use = dut_s.droid.getTcpServerSocketPort(server_sock)
-        sock_c, sock_s = sutils.open_connect_socket(dut_c, dut_s, ipv6_c,
-                                                    ipv6_s, 0, port_to_use,
-                                                    server_sock)
-    except Exception:
-        return False
-    finally:
-        if sock_c is not None:
-            sutils.close_socket(dut_c, sock_c)
-        if sock_s is not None:
-            sutils.close_socket(dut_s, sock_s)
-        if server_sock is not None:
-            sutils.close_server_socket(dut_s, server_sock)
-    return True
-
-
-def run_ping6(dut, target_ip, duration=60):
-    """Run ping test and return the latency result
-
-    Args:
-        dut: the DUT that runs the ping command
-        target_ip: target IP Address for ping
-        duration: the duration time of the ping
-
-    Returns: dict containing the "min/avg/max/mdev" results
-    """
-    cmd = "ping6 -w %d %s" % (duration, target_ip)
-    ping_result = dut.adb.shell(cmd, timeout=duration + 1)
-    res = re.match(".*mdev = (\S+) .*", ping_result, re.S)
-    asserts.assert_true(res, "Cannot reach the IP address %s", target_ip)
-    title = ["min", "avg", "max", "mdev"]
-    result = res.group(1).split("/")
-    latency_result = {}
-    for i in range(len(title)):
-        latency_result[title[i]] = result[i]
-    return latency_result
-
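The mdev parsing above relies on the standard ping6 summary line; a self-contained illustration with made-up output:

    import re
    sample = ("--- fe80::1%aware_data0 ping statistics ---\n"
              "10 packets transmitted, 10 received, 0% packet loss, time 9012ms\n"
              "rtt min/avg/max/mdev = 2.1/3.4/7.9/1.2 ms\n")
    m = re.match(r".*mdev = (\S+) .*", sample, re.S)
    latency = dict(zip(["min", "avg", "max", "mdev"], m.group(1).split("/")))
    assert latency == {"min": "2.1", "avg": "3.4", "max": "7.9", "mdev": "1.2"}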
-
-def reset_device_parameters(ad):
-    """Reset device configurations.
-
-    Args:
-      ad: device to be reset
-    """
-    ad.adb.shell("cmd wifiaware reset")
-
-
-def reset_device_statistics(ad):
-    """Reset device statistics.
-
-    Args:
-        ad: device to be reset
-    """
-    ad.adb.shell("cmd wifiaware native_cb get_cb_count --reset")
-
-
-def set_power_mode_parameters(ad, power_mode):
-    """Set device power mode.
-
-    Set the power configuration DW parameters for the device based on any
-    configuration overrides (if provided)
-
-    Args:
-        ad: android device
-        power_mode: Desired power mode (INTERACTIVE or NON_INTERACTIVE)
-    """
-    if power_mode == "INTERACTIVE":
-        config_settings_high_power(ad)
-    elif power_mode == "NON_INTERACTIVE":
-        config_settings_low_power(ad)
-    else:
-        asserts.fail(
-            "The 'aware_default_power_mode' configuration must be INTERACTIVE or "
-            "NON_INTERACTIVE")
-
-
-#########################################################
-# Aware primitives
-#########################################################
-
-
-def request_network(dut, ns):
-    """Request a Wi-Fi Aware network.
-
-  Args:
-    dut: Device
-    ns: Network specifier
-  Returns: the request key
-  """
-    network_req = {"TransportType": 5, "NetworkSpecifier": ns}
-    return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
-
-
-def get_network_specifier(dut, id, dev_type, peer_mac, sec):
-    """Create a network specifier for the device based on the security
-  configuration.
-
-  Args:
-    dut: device
-    id: session ID
-    dev_type: device type - Initiator or Responder
-    peer_mac: the discovery MAC address of the peer
-    sec: security configuration
-  """
-    if sec is None:
-        return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            id, dev_type, peer_mac)
-    if isinstance(sec, str):
-        return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            id, dev_type, peer_mac, sec)
-    return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-        id, dev_type, peer_mac, None, sec)
-
-
-def configure_power_setting(device, mode, name, value):
-    """Use the command-line API to configure the power setting
-
-  Args:
-    device: Device on which to perform configuration
-    mode: The power mode being set, should be "default", "inactive", or "idle"
-    name: One of the power settings from 'wifiaware set-power'.
-    value: An integer.
-  """
-    device.adb.shell("cmd wifiaware native_api set-power %s %s %d" %
-                     (mode, name, value))
-
-
-def configure_mac_random_interval(device, interval_sec):
-    """Use the command-line API to configure the MAC address randomization
-  interval.
-
-  Args:
-    device: Device on which to perform configuration
-    interval_sec: The MAC randomization interval in seconds. A value of 0
-                  disables all randomization.
-  """
-    device.adb.shell(
-        "cmd wifiaware native_api set mac_random_interval_sec %d" %
-        interval_sec)
-
-
-def configure_ndp_allow_any_override(device, override_api_check):
-    """Use the command-line API to configure whether an NDP Responder may be
-  configured to accept an NDP request from ANY peer.
-
-  By default the target API level of the requesting app determines whether such
-  configuration is permitted. This setting overrides that API check.
-
-  Args:
-    device: Device on which to perform configuration.
-    override_api_check: True to allow a Responder to accept a request from ANY
-                        peer, False to perform the API level check.
-  """
-    device.adb.shell("cmd wifiaware state_mgr allow_ndp_any %s" %
-                     ("true" if override_api_check else "false"))
-
-
-def config_settings_high_power(device):
-    """Configure device's power settings values to high power mode -
-  whether device is in interactive or non-interactive modes"""
-    configure_power_setting(device, "default", "dw_24ghz",
-                            aconsts.POWER_DW_24_INTERACTIVE)
-    configure_power_setting(device, "default", "dw_5ghz",
-                            aconsts.POWER_DW_5_INTERACTIVE)
-    configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
-    configure_power_setting(device, "default", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
-    configure_power_setting(device, "default", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
-
-    configure_power_setting(device, "inactive", "dw_24ghz",
-                            aconsts.POWER_DW_24_INTERACTIVE)
-    configure_power_setting(device, "inactive", "dw_5ghz",
-                            aconsts.POWER_DW_5_INTERACTIVE)
-    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
-    configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
-    configure_power_setting(device, "inactive", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
-
-
-def config_settings_low_power(device):
-    """Configure device's power settings values to low power mode - whether
-  device is in interactive or non-interactive modes"""
-    configure_power_setting(device, "default", "dw_24ghz",
-                            aconsts.POWER_DW_24_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "dw_5ghz",
-                            aconsts.POWER_DW_5_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
-
-    configure_power_setting(device, "inactive", "dw_24ghz",
-                            aconsts.POWER_DW_24_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "dw_5ghz",
-                            aconsts.POWER_DW_5_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
-
-
-def config_power_settings(device,
-                          dw_24ghz,
-                          dw_5ghz,
-                          disc_beacon_interval=None,
-                          num_ss_in_disc=None,
-                          enable_dw_early_term=None):
-    """Configure device's discovery window (DW) values to the specified values -
-  whether the device is in interactive or non-interactive mode.
-
-  Args:
-    dw_24ghz: DW interval in the 2.4GHz band.
-    dw_5ghz: DW interval in the 5GHz band.
-    disc_beacon_interval: The discovery beacon interval (in ms). If None then
-                          not set.
-    num_ss_in_disc: Number of spatial streams to use for discovery. If None then
-                    not set.
-    enable_dw_early_term: If True then enable early termination of the DW. If
-                          None then not set.
-  """
-    configure_power_setting(device, "default", "dw_24ghz", dw_24ghz)
-    configure_power_setting(device, "default", "dw_5ghz", dw_5ghz)
-    configure_power_setting(device, "inactive", "dw_24ghz", dw_24ghz)
-    configure_power_setting(device, "inactive", "dw_5ghz", dw_5ghz)
-
-    if disc_beacon_interval is not None:
-        configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                                disc_beacon_interval)
-        configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                                disc_beacon_interval)
-
-    if num_ss_in_disc is not None:
-        configure_power_setting(device, "default", "num_ss_in_discovery",
-                                num_ss_in_disc)
-        configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                                num_ss_in_disc)
-
-    if enable_dw_early_term is not None:
-        configure_power_setting(device, "default", "enable_dw_early_term",
-                                enable_dw_early_term)
-        configure_power_setting(device, "inactive", "enable_dw_early_term",
-                                enable_dw_early_term)
-
-
-def create_discovery_config(service_name,
-                            d_type,
-                            ssi=None,
-                            match_filter=None,
-                            match_filter_list=None,
-                            ttl=0,
-                            term_cb_enable=True):
-    """Create a publish discovery configuration based on input parameters.
-
-  Args:
-    service_name: Service name - required
-    d_type: Discovery type (publish or subscribe constants)
-    ssi: Supplemental information - defaults to None
-    match_filter, match_filter_list: The match filter; only one of the two
-                                     mechanisms may be used. Both default to None.
-    ttl: Time-to-live - defaults to 0 (i.e. non-self terminating)
-    term_cb_enable: True (default) to enable callback on termination, False
-                    means that no callback is called when session terminates.
-  Returns:
-    discovery configuration object.
-  """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = d_type
-    if ssi is not None:
-        config[aconsts.DISCOVERY_KEY_SSI] = ssi
-    if match_filter is not None:
-        config[aconsts.DISCOVERY_KEY_MATCH_FILTER] = match_filter
-    if match_filter_list is not None:
-        config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = match_filter_list
-    config[aconsts.DISCOVERY_KEY_TTL] = ttl
-    config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_cb_enable
-    return config
-
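A hedged sketch of how a publish configuration is typically assembled with this helper; the service name is arbitrary and PUBLISH_TYPE_UNSOLICITED is assumed to be one of the publish-type constants in aware_const:

    p_config = create_discovery_config(
        "GoogleTestServiceX",                # arbitrary, illustrative service name
        aconsts.PUBLISH_TYPE_UNSOLICITED,    # assumed publish-type constant
        ssi="some supplemental info",
        ttl=0,                               # non-self-terminating session
    )
    # p_config can then be passed to wifiAwarePublish (see create_discovery_pair below).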
-
-def add_ranging_to_pub(p_config, enable_ranging):
-    """Add ranging enabled configuration to a publish configuration (only relevant
-  for publish configuration).
-
-  Args:
-    p_config: The Publish discovery configuration.
-    enable_ranging: True to enable ranging, False to disable.
-  Returns:
-    The modified publish configuration.
-  """
-    p_config[aconsts.DISCOVERY_KEY_RANGING_ENABLED] = enable_ranging
-    return p_config
-
-
-def add_ranging_to_sub(s_config, min_distance_mm, max_distance_mm):
-    """Add ranging distance configuration to a subscribe configuration (only
-  relevant to a subscribe configuration).
-
-  Args:
-    s_config: The Subscribe discovery configuration.
-    min_distance_mm, max_distance_mm: The min and max distance specification.
-                                      Used if not None.
-  Returns:
-    The modified subscribe configuration.
-  """
-    if min_distance_mm is not None:
-        s_config[aconsts.DISCOVERY_KEY_MIN_DISTANCE_MM] = min_distance_mm
-    if max_distance_mm is not None:
-        s_config[aconsts.DISCOVERY_KEY_MAX_DISTANCE_MM] = max_distance_mm
-    return s_config
-
-
-def attach_with_identity(dut):
-    """Start an Aware session (attach) and wait for confirmation and identity
-  information (mac address).
-
-  Args:
-    dut: Device under test
-  Returns:
-    id: Aware session ID.
-    mac: Discovery MAC address of this device.
-  """
-    id = dut.droid.wifiAwareAttach(True)
-    wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-    event = wait_for_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    mac = event["data"]["mac"]
-
-    return id, mac
-
-
-def create_discovery_pair(p_dut,
-                          s_dut,
-                          p_config,
-                          s_config,
-                          device_startup_offset,
-                          msg_id=None):
-    """Creates a discovery session (publish and subscribe), and waits for
-  service discovery - at that point the sessions are connected and ready for
-  further messaging or data-path setup.
-
-  Args:
-    p_dut: Device to use as publisher.
-    s_dut: Device to use as subscriber.
-    p_config: Publish configuration.
-    s_config: Subscribe configuration.
-    device_startup_offset: Number of seconds to offset the enabling of NAN on
-                           the two devices.
-    msg_id: Controls whether a message is sent from Subscriber to Publisher
-            (so that publisher has the sub's peer ID). If None then not sent,
-            otherwise should be an int for the message id.
-  Returns: variable size list of:
-    p_id: Publisher attach session id
-    s_id: Subscriber attach session id
-    p_disc_id: Publisher discovery session id
-    s_disc_id: Subscriber discovery session id
-    peer_id_on_sub: Peer ID of the Publisher as seen on the Subscriber
-    peer_id_on_pub: Peer ID of the Subscriber as seen on the Publisher. Only
-                    included if |msg_id| is not None.
-  """
-    p_dut.pretty_name = 'Publisher'
-    s_dut.pretty_name = 'Subscriber'
-
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach()
-    wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach()
-    wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
-
-    # Publisher: start publish and wait for confirmation
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
-    wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-
-    # Subscriber: start subscribe and wait for confirmation
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
-    wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
-
-    # Subscriber: wait for service discovery
-    discovery_event = wait_for_event(s_dut,
-                                     aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    peer_id_on_sub = discovery_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
-
-    # Optionally send a message from Subscriber to Publisher
-    if msg_id is not None:
-        ping_msg = 'PING'
-
-        # Subscriber: send message to peer (Publisher)
-        s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id,
-                                         ping_msg, aconsts.MAX_TX_RETRIES)
-        sub_tx_msg_event = wait_for_event(s_dut,
-                                          aconsts.SESSION_CB_ON_MESSAGE_SENT)
-        asserts.assert_equal(
-            msg_id,
-            sub_tx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_ID],
-            'Subscriber -> Publisher message ID corrupted')
-
-        # Publisher: wait for received message
-        pub_rx_msg_event = wait_for_event(
-            p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-        peer_id_on_pub = pub_rx_msg_event['data'][
-            aconsts.SESSION_CB_KEY_PEER_ID]
-        asserts.assert_equal(
-            ping_msg,
-            pub_rx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-            'Subscriber -> Publisher message corrupted')
-        return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub, peer_id_on_pub
-
-    return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub
-
-
-def create_ib_ndp(p_dut, s_dut, p_config, s_config, device_startup_offset):
-    """Create an NDP (using in-band discovery)
-
-  Args:
-    p_dut: Device to use as publisher.
-    s_dut: Device to use as subscriber.
-    p_config: Publish configuration.
-    s_config: Subscribe configuration.
-    device_startup_offset: Number of seconds to offset the enabling of NAN on
-                           the two devices.
-  """
-    (p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
-     peer_id_on_pub) = create_discovery_pair(p_dut,
-                                             s_dut,
-                                             p_config,
-                                             s_config,
-                                             device_startup_offset,
-                                             msg_id=9999)
-
-    # Publisher: request network
-    p_req_key = request_network(
-        p_dut,
-        p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, peer_id_on_pub,
-                                                    None))
-
-    # Subscriber: request network
-    s_req_key = request_network(
-        s_dut,
-        s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id, peer_id_on_sub,
-                                                    None))
-
-    # Publisher & Subscriber: wait for network formation
-    p_net_event_nc = wait_for_event_with_keys(
-        p_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-    s_net_event_nc = wait_for_event_with_keys(
-        s_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, s_req_key))
-
-    # validate no leak of information
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in p_net_event_nc["data"],
-        "Network specifier leak!")
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in s_net_event_nc["data"],
-        "Network specifier leak!")
-
-    # note the Pub <-> Sub swap: each side's event reports the peer's IPv6 address
-    p_ipv6 = s_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-    s_ipv6 = p_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-
-    p_net_event_lp = wait_for_event_with_keys(
-        p_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-    s_net_event_lp = wait_for_event_with_keys(
-        s_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, s_req_key))
-
-    p_aware_if = p_net_event_lp["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    s_aware_if = s_net_event_lp["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-
-    return p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6, s_ipv6
-
-
-def create_oob_ndp_on_sessions(init_dut, resp_dut, init_id, init_mac, resp_id,
-                               resp_mac):
-    """Create an NDP on top of existing Aware sessions (using OOB discovery)
-
-  Args:
-    init_dut: Initiator device
-    resp_dut: Responder device
-    init_id: Initiator attach session id
-    init_mac: Initiator discovery MAC address
-    resp_id: Responder attach session id
-    resp_mac: Responder discovery MAC address
-  Returns:
-    init_req_key: Initiator network request
-    resp_req_key: Responder network request
-    init_aware_if: Initiator Aware data interface
-    resp_aware_if: Responder Aware data interface
-    init_ipv6: Initiator IPv6 address
-    resp_ipv6: Responder IPv6 address
-  """
-    # Responder: request network
-    resp_req_key = request_network(
-        resp_dut,
-        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
-
-    # Initiator: request network
-    init_req_key = request_network(
-        init_dut,
-        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
-
-    # Initiator & Responder: wait for network formation
-    init_net_event_nc = wait_for_event_with_keys(
-        init_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-    resp_net_event_nc = wait_for_event_with_keys(
-        resp_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
-
-    # validate no leak of information
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in init_net_event_nc["data"],
-        "Network specifier leak!")
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in resp_net_event_nc["data"],
-        "Network specifier leak!")
-
-    # note the Init <-> Resp swap: each side's event reports the peer's IPv6 address
-    resp_ipv6 = init_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-    init_ipv6 = resp_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-
-    init_net_event_lp = wait_for_event_with_keys(
-        init_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-    resp_net_event_lp = wait_for_event_with_keys(
-        resp_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
-
-    init_aware_if = init_net_event_lp['data'][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    resp_aware_if = resp_net_event_lp['data'][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-
-    return (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
-            init_ipv6, resp_ipv6)
-
-
-def create_oob_ndp(init_dut, resp_dut):
-    """Create an NDP (using OOB discovery)
-
-  Args:
-    init_dut: Initiator device
-    resp_dut: Responder device
-  """
-    init_dut.pretty_name = 'Initiator'
-    resp_dut.pretty_name = 'Responder'
-
-    # Initiator+Responder: attach and wait for confirmation & identity
-    init_id = init_dut.droid.wifiAwareAttach(True)
-    wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    init_ident_event = wait_for_event(init_dut,
-                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    init_mac = init_ident_event['data']['mac']
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = wait_for_event(resp_dut,
-                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event['data']['mac']
-
-    # wait for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(WAIT_FOR_CLUSTER)
-
-    (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-     resp_ipv6) = create_oob_ndp_on_sessions(init_dut, resp_dut, init_id,
-                                             init_mac, resp_id, resp_mac)
-
-    return (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
-            init_ipv6, resp_ipv6)
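For orientation, a hedged sketch of how these helpers chained together in a typical OOB data-path test; init_dut and resp_dut are assumed to be AndroidDevice objects supplied by the test harness:

    (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
     init_ipv6, resp_ipv6) = create_oob_ndp(init_dut, resp_dut)

    # Exercise the data path: ping the responder's link-local address, scoped
    # to the initiator's aware data interface.
    latency = run_ping6(init_dut, "%s%%%s" % (resp_ipv6, init_aware_if), duration=10)
    init_dut.log.info("ping6 over NDP: %s", latency)

    # Tear down by releasing the network requests on both sides.
    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)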
diff --git a/src/antlion/test_utils/wifi/WifiBaseTest.py b/src/antlion/test_utils/wifi/base_test.py
similarity index 66%
rename from src/antlion/test_utils/wifi/WifiBaseTest.py
rename to src/antlion/test_utils/wifi/base_test.py
index ac770f9..7e97d8f 100644
--- a/src/antlion/test_utils/wifi/WifiBaseTest.py
+++ b/src/antlion/test_utils/wifi/base_test.py
@@ -21,7 +21,6 @@
 import os
 import time
 
-from antlion import asserts
 from antlion import context
 from antlion import signals
 from antlion import utils
@@ -34,6 +33,7 @@
 from antlion.test_utils.net import net_test_utils as nutils
 from antlion.test_utils.wifi import wifi_test_utils as wutils
 
+from mobly import asserts
 from mobly.base_test import STAGE_NAME_TEARDOWN_CLASS
 
 WifiEnums = wutils.WifiEnums
@@ -43,7 +43,6 @@
 
 
 class WifiBaseTest(BaseTestClass):
-
     def __init__(self, configs):
         super().__init__(configs)
         self.enable_packet_log = False
@@ -53,7 +52,7 @@
         self.packet_log_pid = {}
 
     def setup_class(self):
-        if hasattr(self, 'attenuators') and self.attenuators:
+        if hasattr(self, "attenuators") and self.attenuators:
             for attenuator in self.attenuators:
                 attenuator.set_atten(0)
         opt_param = ["pixel_models", "cnss_diag_file", "country_code_file"]
@@ -63,8 +62,8 @@
                 self.cnss_diag_file = self.cnss_diag_file[0]
             if not os.path.isfile(self.cnss_diag_file):
                 self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
+                    self.user_params[Config.key_config_path.value], self.cnss_diag_file
+                )
         if self.enable_packet_log and hasattr(self, "packet_capture"):
             self.packet_logger = self.packet_capture[0]
             self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g)
@@ -78,43 +77,46 @@
                     if not os.path.isfile(self.country_code_file):
                         self.country_code_file = os.path.join(
                             self.user_params[Config.key_config_path.value],
-                            self.country_code_file)
-                    self.country_code = utils.load_config(
-                        self.country_code_file)["country"]
+                            self.country_code_file,
+                        )
+                    self.country_code = utils.load_config(self.country_code_file)[
+                        "country"
+                    ]
                 else:
                     self.country_code = WifiEnums.CountryCode.US
                 wutils.set_wifi_country_code(ad, self.country_code)
 
     def setup_test(self):
-        if (hasattr(self, "android_devices")
-                and hasattr(self, "cnss_diag_file")
-                and hasattr(self, "pixel_models")):
-            wutils.start_cnss_diags(self.android_devices, self.cnss_diag_file,
-                                    self.pixel_models)
+        if (
+            hasattr(self, "android_devices")
+            and hasattr(self, "cnss_diag_file")
+            and hasattr(self, "pixel_models")
+        ):
+            wutils.start_cnss_diags(
+                self.android_devices, self.cnss_diag_file, self.pixel_models
+            )
         self.tcpdump_proc = []
         if hasattr(self, "android_devices"):
             for ad in self.android_devices:
                 proc = nutils.start_tcpdump(ad, self.test_name)
                 self.tcpdump_proc.append((ad, proc))
         if hasattr(self, "packet_logger"):
-            self.packet_log_pid = wutils.start_pcap(self.packet_logger, 'dual',
-                                                    self.test_name)
+            self.packet_log_pid = wutils.start_pcap(
+                self.packet_logger, "dual", self.test_name
+            )
 
     def teardown_test(self):
-        if (hasattr(self, "android_devices")
-                and hasattr(self, "cnss_diag_file")
-                and hasattr(self, "pixel_models")):
+        if (
+            hasattr(self, "android_devices")
+            and hasattr(self, "cnss_diag_file")
+            and hasattr(self, "pixel_models")
+        ):
             wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
             for proc in self.tcpdump_proc:
-                nutils.stop_tcpdump(proc[0],
-                                    proc[1],
-                                    self.test_name,
-                                    pull_dump=False)
+                nutils.stop_tcpdump(proc[0], proc[1], self.test_name, pull_dump=False)
             self.tcpdump_proc = []
         if hasattr(self, "packet_logger") and self.packet_log_pid:
-            wutils.stop_pcap(self.packet_logger,
-                             self.packet_log_pid,
-                             test_status=True)
+            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=True)
             self.packet_log_pid = {}
 
     def teardown_class(self):
@@ -129,8 +131,7 @@
                 ad.take_bug_report(test_name, begin_time)
                 ad.cat_adb_log(test_name, begin_time)
                 wutils.get_ssrdumps(ad)
-            if (hasattr(self, "cnss_diag_file")
-                    and hasattr(self, "pixel_models")):
+            if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
                 wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
                 for ad in self.android_devices:
                     wutils.get_cnss_diag_log(ad)
@@ -138,9 +139,7 @@
                 nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
             self.tcpdump_proc = []
         if hasattr(self, "packet_logger") and self.packet_log_pid:
-            wutils.stop_pcap(self.packet_logger,
-                             self.packet_log_pid,
-                             test_status=False)
+            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=False)
             self.packet_log_pid = {}
 
         # Gets a wlan_device log and calls the generic device fail on DUT.
@@ -159,13 +158,14 @@
             test_name: Name of the test that triggered this function.
             begin_time: Logline format timestamp taken when the test started.
         """
-        if (not hasattr(device, "take_bug_report_on_fail")
-                or device.take_bug_report_on_fail):
+        if (
+            not hasattr(device, "take_bug_report_on_fail")
+            or device.take_bug_report_on_fail
+        ):
             device.take_bug_report(test_name, begin_time)
 
-        if hasattr(device,
-                   "hard_reboot_on_fail") and device.hard_reboot_on_fail:
-            device.reboot(reboot_type='hard', testbed_pdus=self.pdu_devices)
+        if hasattr(device, "hard_reboot_on_fail") and device.hard_reboot_on_fail:
+            device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
 
     def download_ap_logs(self):
         """Downloads the DHCP and hostapad logs from the access_point.
@@ -178,48 +178,50 @@
         dhcp_log = self.access_point.get_dhcp_logs()
         if dhcp_log:
             dhcp_log_path = os.path.join(current_path, "dhcp_log.txt")
-            with open(dhcp_log_path, 'w') as f:
+            with open(dhcp_log_path, "w") as f:
                 f.write(dhcp_log)
 
         hostapd_logs = self.access_point.get_hostapd_logs()
         for interface in hostapd_logs:
-            hostapd_log_path = os.path.join(current_path,
-                                            f'hostapd_log_{interface}.txt')
-            with open(hostapd_log_path, 'w') as f:
+            hostapd_log_path = os.path.join(
+                current_path, f"hostapd_log_{interface}.txt"
+            )
+            with open(hostapd_log_path, "w") as f:
                 f.write(hostapd_logs[interface])
 
         radvd_log = self.access_point.get_radvd_logs()
         if radvd_log:
             radvd_log_path = os.path.join(current_path, "radvd_log.txt")
-            with open(radvd_log_path, 'w') as f:
+            with open(radvd_log_path, "w") as f:
                 f.write(radvd_log)
 
     def get_psk_network(
-            self,
-            mirror_ap,
-            reference_networks,
-            hidden=False,
-            same_ssid=False,
-            security_mode=hostapd_constants.WPA2_STRING,
-            ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G):
+        self,
+        mirror_ap,
+        reference_networks,
+        hidden=False,
+        same_ssid=False,
+        security_mode=hostapd_constants.WPA2_STRING,
+        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+    ):
         """Generates SSID and passphrase for a WPA2 network using random
-           generator.
+        generator.
 
-           Args:
-               mirror_ap: Boolean, determines if both APs use the same hostapd
-                          config or different configs.
-               reference_networks: List of PSK networks.
-               same_ssid: Boolean, determines if both bands on AP use the same
-                          SSID.
-               ssid_length_2gecond AP Int, number of characters to use for 2G SSID.
-               ssid_length_5g: Int, number of characters to use for 5G SSID.
-               passphrase_length_2g: Int, length of password for 2G network.
-               passphrase_length_5g: Int, length of password for 5G network.
+        Args:
+            mirror_ap: Boolean, determines if both APs use the same hostapd
+                       config or different configs.
+            reference_networks: List of PSK networks.
+            same_ssid: Boolean, determines if both bands on AP use the same
+                       SSID.
+            ssid_length_2g: Int, number of characters to use for 2G SSID.
+            ssid_length_5g: Int, number of characters to use for 5G SSID.
+            passphrase_length_2g: Int, length of password for 2G network.
+            passphrase_length_5g: Int, length of password for 5G network.
 
-           Returns: A dict of 2G and 5G network lists for hostapd configuration.
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
 
         """
         network_dict_2g = {}
@@ -228,51 +230,52 @@
         ref_2g_security = security_mode
 
         if same_ssid:
-            ref_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
+            ref_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g)
             ref_5g_ssid = ref_2g_ssid
 
             ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
             ref_5g_passphrase = ref_2g_passphrase
 
         else:
-            ref_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
+            ref_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g)
             ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
 
-            ref_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
+            ref_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g)
             ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
 
         network_dict_2g = {
             "SSID": ref_2g_ssid,
             "security": ref_2g_security,
             "password": ref_2g_passphrase,
-            "hiddenSSID": hidden
+            "hiddenSSID": hidden,
         }
 
         network_dict_5g = {
             "SSID": ref_5g_ssid,
             "security": ref_5g_security,
             "password": ref_5g_passphrase,
-            "hiddenSSID": hidden
+            "hiddenSSID": hidden,
         }
 
         ap = 0
         for ap in range(MAX_AP_COUNT):
-            reference_networks.append({
-                "2g": copy.copy(network_dict_2g),
-                "5g": copy.copy(network_dict_5g)
-            })
+            reference_networks.append(
+                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
+            )
             if not mirror_ap:
                 break
         return {"2g": network_dict_2g, "5g": network_dict_5g}
 
-    def get_open_network(self,
-                         mirror_ap,
-                         open_network,
-                         hidden=False,
-                         same_ssid=False,
-                         ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-                         ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-                         security_mode='none'):
+    def get_open_network(
+        self,
+        mirror_ap,
+        open_network,
+        hidden=False,
+        same_ssid=False,
+        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
+        security_mode="none",
+    ):
         """Generates SSIDs for a open network using a random generator.
 
         Args:
@@ -292,60 +295,60 @@
         network_dict_5g = {}
 
         if same_ssid:
-            open_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
+            open_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g)
             open_5g_ssid = open_2g_ssid
 
         else:
-            open_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
-            open_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
+            open_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g)
+            open_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g)
 
         network_dict_2g = {
             "SSID": open_2g_ssid,
             "security": security_mode,
-            "hiddenSSID": hidden
+            "hiddenSSID": hidden,
         }
 
         network_dict_5g = {
             "SSID": open_5g_ssid,
             "security": security_mode,
-            "hiddenSSID": hidden
+            "hiddenSSID": hidden,
         }
 
         ap = 0
         for ap in range(MAX_AP_COUNT):
-            open_network.append({
-                "2g": copy.copy(network_dict_2g),
-                "5g": copy.copy(network_dict_5g)
-            })
+            open_network.append(
+                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
+            )
             if not mirror_ap:
                 break
         return {"2g": network_dict_2g, "5g": network_dict_5g}
 
     def get_wep_network(
-            self,
-            mirror_ap,
-            networks,
-            hidden=False,
-            same_ssid=False,
-            ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G):
+        self,
+        mirror_ap,
+        networks,
+        hidden=False,
+        same_ssid=False,
+        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+    ):
         """Generates SSID and passphrase for a WEP network using random
-           generator.
+        generator.
 
-           Args:
-               mirror_ap: Boolean, determines if both APs use the same hostapd
-                          config or different configs.
-               networks: List of WEP networks.
-               same_ssid: Boolean, determines if both bands on AP use the same
-                          SSID.
-               ssid_length_2gecond AP Int, number of characters to use for 2G SSID.
-               ssid_length_5g: Int, number of characters to use for 5G SSID.
-               passphrase_length_2g: Int, length of password for 2G network.
-               passphrase_length_5g: Int, length of password for 5G network.
+        Args:
+            mirror_ap: Boolean, determines if both APs use the same hostapd
+                       config or different configs.
+            networks: List of WEP networks.
+            same_ssid: Boolean, determines if both bands on AP use the same
+                       SSID.
+            ssid_length_2g: Int, number of characters to use for 2G SSID.
+            ssid_length_5g: Int, number of characters to use for 5G SSID.
+            passphrase_length_2g: Int, length of password for 2G network.
+            passphrase_length_5g: Int, length of password for 5G network.
 
-           Returns: A dict of 2G and 5G network lists for hostapd configuration.
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
 
         """
         network_dict_2g = {}
@@ -354,39 +357,38 @@
         ref_2g_security = hostapd_constants.WEP_STRING
 
         if same_ssid:
-            ref_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
+            ref_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g)
             ref_5g_ssid = ref_2g_ssid
 
             ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
             ref_5g_passphrase = ref_2g_passphrase
 
         else:
-            ref_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
+            ref_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g)
             ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
 
-            ref_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
+            ref_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g)
             ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
 
         network_dict_2g = {
             "SSID": ref_2g_ssid,
             "security": ref_2g_security,
             "wepKeys": [ref_2g_passphrase] * 4,
-            "hiddenSSID": hidden
+            "hiddenSSID": hidden,
         }
 
         network_dict_5g = {
             "SSID": ref_5g_ssid,
             "security": ref_5g_security,
             "wepKeys": [ref_2g_passphrase] * 4,
-            "hiddenSSID": hidden
+            "hiddenSSID": hidden,
         }
 
         ap = 0
         for ap in range(MAX_AP_COUNT):
-            networks.append({
-                "2g": copy.copy(network_dict_2g),
-                "5g": copy.copy(network_dict_5g)
-            })
+            networks.append(
+                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
+            )
             if not mirror_ap:
                 break
         return {"2g": network_dict_2g, "5g": network_dict_5g}
@@ -416,7 +418,7 @@
                 self.ent_networks[ap_instance][band]["bssid"] = bssid
             else:
                 self.ent_networks_pwd[ap_instance][band]["bssid"] = bssid
-        if network["security"] == 'none':
+        if network["security"] == "none":
             self.open_network[ap_instance][band]["bssid"] = bssid
 
     def populate_bssid(self, ap_instance, ap, networks_5g, networks_2g):
@@ -434,44 +436,43 @@
             return
 
         for network in networks_5g:
-            if 'channel' in network:
+            if "channel" in network:
                 continue
-            self.update_bssid(ap_instance, ap, network,
-                              hostapd_constants.BAND_5G)
+            self.update_bssid(ap_instance, ap, network, hostapd_constants.BAND_5G)
 
         for network in networks_2g:
-            if 'channel' in network:
+            if "channel" in network:
                 continue
-            self.update_bssid(ap_instance, ap, network,
-                              hostapd_constants.BAND_2G)
+            self.update_bssid(ap_instance, ap, network, hostapd_constants.BAND_2G)
 
     def configure_openwrt_ap_and_start(
-            self,
-            channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_5g_ap2=None,
-            channel_2g_ap2=None,
-            ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-            mirror_ap=False,
-            hidden=False,
-            same_ssid=False,
-            open_network=False,
-            wpa1_network=False,
-            wpa_network=False,
-            wep_network=False,
-            ent_network=False,
-            ent_network_pwd=False,
-            owe_network=False,
-            sae_network=False,
-            saemixed_network=False,
-            radius_conf_2g=None,
-            radius_conf_5g=None,
-            radius_conf_pwd=None,
-            ap_count=1,
-            ieee80211w=None):
+        self,
+        channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+        channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+        channel_5g_ap2=None,
+        channel_2g_ap2=None,
+        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
+        passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+        mirror_ap=False,
+        hidden=False,
+        same_ssid=False,
+        open_network=False,
+        wpa1_network=False,
+        wpa_network=False,
+        wep_network=False,
+        ent_network=False,
+        ent_network_pwd=False,
+        owe_network=False,
+        sae_network=False,
+        saemixed_network=False,
+        radius_conf_2g=None,
+        radius_conf_5g=None,
+        radius_conf_pwd=None,
+        ap_count=1,
+        ieee80211w=None,
+    ):
         """Create, configure and start OpenWrt AP.
 
         Args:
@@ -501,8 +502,7 @@
         if mirror_ap and ap_count == 1:
             raise ValueError("ap_count cannot be 1 if mirror_ap is True.")
         if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1:
-            raise ValueError(
-                "ap_count cannot be 1 if channels of AP2 are provided.")
+            raise ValueError("ap_count cannot be 1 if channels of AP2 are provided.")
         # we are creating a channel list for 2G and 5G bands. The list is of
         # size 2 and this is based on the assumption that each testbed will have
         # at most 2 APs.
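+        # Illustrative call that exercises the mirroring path (argument values
+        # are examples only):
+        #   self.configure_openwrt_ap_and_start(
+        #       ap_count=2, mirror_ap=True, wpa_network=True
+        #   )
+        # With mirror_ap=True, the second AP is configured and started with the
+        # same network list as the first.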
@@ -527,12 +527,16 @@
         for i in range(ap_count):
             network_list = []
             if wpa1_network:
-                wpa1_dict = self.get_psk_network(mirror_ap, self.wpa1_networks,
-                                                 hidden, same_ssid,
-                                                 ssid_length_2g,
-                                                 ssid_length_5g,
-                                                 passphrase_length_2g,
-                                                 passphrase_length_5g)
+                wpa1_dict = self.get_psk_network(
+                    mirror_ap,
+                    self.wpa1_networks,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    passphrase_length_2g,
+                    passphrase_length_5g,
+                )
                 wpa1_dict[hostapd_constants.BAND_2G]["security"] = "psk"
                 wpa1_dict[hostapd_constants.BAND_5G]["security"] = "psk"
                 wpa1_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
@@ -540,12 +544,16 @@
                 self.wpa1_networks.append(wpa1_dict)
                 network_list.append(wpa1_dict)
             if wpa_network:
-                wpa_dict = self.get_psk_network(mirror_ap,
-                                                self.reference_networks,
-                                                hidden, same_ssid,
-                                                ssid_length_2g, ssid_length_5g,
-                                                passphrase_length_2g,
-                                                passphrase_length_5g)
+                wpa_dict = self.get_psk_network(
+                    mirror_ap,
+                    self.reference_networks,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    passphrase_length_2g,
+                    passphrase_length_5g,
+                )
                 wpa_dict[hostapd_constants.BAND_2G]["security"] = "psk2"
                 wpa_dict[hostapd_constants.BAND_5G]["security"] = "psk2"
                 wpa_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
@@ -553,105 +561,135 @@
                 self.wpa_networks.append(wpa_dict)
                 network_list.append(wpa_dict)
             if wep_network:
-                wep_dict = self.get_wep_network(mirror_ap, self.wep_networks,
-                                                hidden, same_ssid,
-                                                ssid_length_2g, ssid_length_5g)
+                wep_dict = self.get_wep_network(
+                    mirror_ap,
+                    self.wep_networks,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
                 network_list.append(wep_dict)
             if ent_network:
-                ent_dict = self.get_open_network(mirror_ap, self.ent_networks,
-                                                 hidden, same_ssid,
-                                                 ssid_length_2g,
-                                                 ssid_length_5g)
+                ent_dict = self.get_open_network(
+                    mirror_ap,
+                    self.ent_networks,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
                 ent_dict["2g"]["security"] = "wpa2"
                 ent_dict["2g"].update(radius_conf_2g)
                 ent_dict["5g"]["security"] = "wpa2"
                 ent_dict["5g"].update(radius_conf_5g)
                 network_list.append(ent_dict)
             if ent_network_pwd:
-                ent_pwd_dict = self.get_open_network(mirror_ap,
-                                                     self.ent_networks_pwd,
-                                                     hidden, same_ssid,
-                                                     ssid_length_2g,
-                                                     ssid_length_5g)
+                ent_pwd_dict = self.get_open_network(
+                    mirror_ap,
+                    self.ent_networks_pwd,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
                 ent_pwd_dict["2g"]["security"] = "wpa2"
                 ent_pwd_dict["2g"].update(radius_conf_pwd)
                 ent_pwd_dict["5g"]["security"] = "wpa2"
                 ent_pwd_dict["5g"].update(radius_conf_pwd)
                 network_list.append(ent_pwd_dict)
             if open_network:
-                open_dict = self.get_open_network(mirror_ap, self.open_network,
-                                                  hidden, same_ssid,
-                                                  ssid_length_2g,
-                                                  ssid_length_5g)
+                open_dict = self.get_open_network(
+                    mirror_ap,
+                    self.open_network,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
                 network_list.append(open_dict)
             if owe_network:
-                owe_dict = self.get_open_network(mirror_ap, self.owe_networks,
-                                                 hidden, same_ssid,
-                                                 ssid_length_2g,
-                                                 ssid_length_5g, "OWE")
+                owe_dict = self.get_open_network(
+                    mirror_ap,
+                    self.owe_networks,
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    "OWE",
+                )
                 owe_dict[hostapd_constants.BAND_2G]["security"] = "owe"
                 owe_dict[hostapd_constants.BAND_5G]["security"] = "owe"
                 network_list.append(owe_dict)
             if sae_network:
-                sae_dict = self.get_psk_network(mirror_ap, self.sae_networks,
-                                                hidden, same_ssid,
-                                                hostapd_constants.SAE_KEY_MGMT,
-                                                ssid_length_2g, ssid_length_5g,
-                                                passphrase_length_2g,
-                                                passphrase_length_5g)
+                sae_dict = self.get_psk_network(
+                    mirror_ap,
+                    self.sae_networks,
+                    hidden,
+                    same_ssid,
+                    hostapd_constants.SAE_KEY_MGMT,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    passphrase_length_2g,
+                    passphrase_length_5g,
+                )
                 sae_dict[hostapd_constants.BAND_2G]["security"] = "sae"
                 sae_dict[hostapd_constants.BAND_5G]["security"] = "sae"
                 network_list.append(sae_dict)
             if saemixed_network:
                 saemixed_dict = self.get_psk_network(
-                    mirror_ap, self.saemixed_networks, hidden, same_ssid,
-                    hostapd_constants.SAE_KEY_MGMT, ssid_length_2g,
-                    ssid_length_5g, passphrase_length_2g, passphrase_length_5g)
-                saemixed_dict[
-                    hostapd_constants.BAND_2G]["security"] = "sae-mixed"
-                saemixed_dict[
-                    hostapd_constants.BAND_5G]["security"] = "sae-mixed"
-                saemixed_dict[
-                    hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                saemixed_dict[
-                    hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
+                    mirror_ap,
+                    self.saemixed_networks,
+                    hidden,
+                    same_ssid,
+                    hostapd_constants.SAE_KEY_MGMT,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    passphrase_length_2g,
+                    passphrase_length_5g,
+                )
+                saemixed_dict[hostapd_constants.BAND_2G]["security"] = "sae-mixed"
+                saemixed_dict[hostapd_constants.BAND_5G]["security"] = "sae-mixed"
+                saemixed_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
+                saemixed_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
                 network_list.append(saemixed_dict)
-            self.access_points[i].configure_ap(network_list, channels_2g[i],
-                                               channels_5g[i])
+            self.access_points[i].configure_ap(
+                network_list, channels_2g[i], channels_5g[i]
+            )
             self.access_points[i].start_ap()
-            self.bssid_map.append(
-                self.access_points[i].get_bssids_for_wifi_networks())
+            self.bssid_map.append(self.access_points[i].get_bssids_for_wifi_networks())
             if mirror_ap:
-                self.access_points[i + 1].configure_ap(network_list,
-                                                       channels_2g[i + 1],
-                                                       channels_5g[i + 1])
+                self.access_points[i + 1].configure_ap(
+                    network_list, channels_2g[i + 1], channels_5g[i + 1]
+                )
                 self.access_points[i + 1].start_ap()
                 self.bssid_map.append(
-                    self.access_points[i + 1].get_bssids_for_wifi_networks())
+                    self.access_points[i + 1].get_bssids_for_wifi_networks()
+                )
                 break
 
     def legacy_configure_ap_and_start(
-            self,
-            channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            max_2g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_2G,
-            max_5g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_5G,
-            ap_ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            ap_passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            ap_ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            ap_passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-            hidden=False,
-            same_ssid=False,
-            mirror_ap=True,
-            wpa_network=False,
-            wep_network=False,
-            ent_network=False,
-            radius_conf_2g=None,
-            radius_conf_5g=None,
-            ent_network_pwd=False,
-            radius_conf_pwd=None,
-            ap_count=1):
-
+        self,
+        channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+        channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+        max_2g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_2G,
+        max_5g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_5G,
+        ap_ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
+        ap_passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        ap_ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
+        ap_passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+        hidden=False,
+        same_ssid=False,
+        mirror_ap=True,
+        wpa_network=False,
+        wep_network=False,
+        ent_network=False,
+        radius_conf_2g=None,
+        radius_conf_5g=None,
+        ent_network_pwd=False,
+        radius_conf_pwd=None,
+        ap_count=1,
+    ):
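+        """Create, configure, and start the hostapd-based (legacy) APs.
+
+        Cleans up any hostapd/dhcpd processes left over from a previous run,
+        builds the requested 2G and 5G network lists from user_params, and
+        starts each configured access point, mirroring the configuration onto
+        a second AP when mirror_ap is set.
+        """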
         config_count = 1
         count = 0
 
@@ -680,7 +718,6 @@
             self._cleanup_hostapd_and_dhcpd(i)
 
         for count in range(config_count):
-
             network_list_2g = []
             network_list_5g = []
 
@@ -694,7 +731,8 @@
                 mirror_ap,
                 self.user_params["reference_networks"],
                 hidden=hidden,
-                same_ssid=same_ssid)
+                same_ssid=same_ssid,
+            )
             self.reference_networks = self.user_params["reference_networks"]
 
             network_list_2g.append(networks_dict["2g"])
@@ -710,7 +748,8 @@
                     mirror_ap,
                     self.user_params["open_network"],
                     hidden=hidden,
-                    same_ssid=same_ssid)
+                    same_ssid=same_ssid,
+                )
                 self.open_network = self.user_params["open_network"]
 
                 network_list_2g.append(networks_dict["2g"])
@@ -722,7 +761,8 @@
                         self.user_params["wpa_networks"],
                         hidden=hidden,
                         same_ssid=same_ssid,
-                        security_mode=hostapd_constants.WPA_STRING)
+                        security_mode=hostapd_constants.WPA_STRING,
+                    )
                     self.wpa_networks = self.user_params["wpa_networks"]
 
                     network_list_2g.append(networks_dict["2g"])
@@ -733,7 +773,8 @@
                         mirror_ap,
                         self.user_params["wep_networks"],
                         hidden=hidden,
-                        same_ssid=same_ssid)
+                        same_ssid=same_ssid,
+                    )
                     self.wep_networks = self.user_params["wep_networks"]
 
                     network_list_2g.append(networks_dict["2g"])
@@ -744,12 +785,11 @@
                         mirror_ap,
                         self.user_params["ent_networks"],
                         hidden=hidden,
-                        same_ssid=same_ssid)
-                    networks_dict["2g"][
-                        "security"] = hostapd_constants.ENT_STRING
+                        same_ssid=same_ssid,
+                    )
+                    networks_dict["2g"]["security"] = hostapd_constants.ENT_STRING
                     networks_dict["2g"].update(radius_conf_2g)
-                    networks_dict["5g"][
-                        "security"] = hostapd_constants.ENT_STRING
+                    networks_dict["5g"]["security"] = hostapd_constants.ENT_STRING
                     networks_dict["5g"].update(radius_conf_5g)
                     self.ent_networks = self.user_params["ent_networks"]
 
@@ -761,15 +801,13 @@
                         mirror_ap,
                         self.user_params["ent_networks_pwd"],
                         hidden=hidden,
-                        same_ssid=same_ssid)
-                    networks_dict["2g"][
-                        "security"] = hostapd_constants.ENT_STRING
+                        same_ssid=same_ssid,
+                    )
+                    networks_dict["2g"]["security"] = hostapd_constants.ENT_STRING
                     networks_dict["2g"].update(radius_conf_pwd)
-                    networks_dict["5g"][
-                        "security"] = hostapd_constants.ENT_STRING
+                    networks_dict["5g"]["security"] = hostapd_constants.ENT_STRING
                     networks_dict["5g"].update(radius_conf_pwd)
-                    self.ent_networks_pwd = self.user_params[
-                        "ent_networks_pwd"]
+                    self.ent_networks_pwd = self.user_params["ent_networks_pwd"]
 
                     network_list_2g.append(networks_dict["2g"])
                     network_list_5g.append(networks_dict["5g"])
@@ -778,26 +816,32 @@
             orig_network_list_2g = copy.copy(network_list_2g)
 
             if len(network_list_5g) > 1:
-                self.config_5g = self._generate_legacy_ap_config(
-                    network_list_5g)
+                self.config_5g = self._generate_legacy_ap_config(network_list_5g)
             if len(network_list_2g) > 1:
-                self.config_2g = self._generate_legacy_ap_config(
-                    network_list_2g)
+                self.config_2g = self._generate_legacy_ap_config(network_list_2g)
 
             self.access_points[count].start_ap(self.config_2g)
             self.access_points[count].start_ap(self.config_5g)
-            self.populate_bssid(count, self.access_points[count],
-                                orig_network_list_5g, orig_network_list_2g)
+            self.populate_bssid(
+                count,
+                self.access_points[count],
+                orig_network_list_5g,
+                orig_network_list_2g,
+            )
 
         # Repeat configuration on the second router.
         if mirror_ap and ap_count == 2:
             self.access_points[AP_2].start_ap(self.config_2g)
             self.access_points[AP_2].start_ap(self.config_5g)
-            self.populate_bssid(AP_2, self.access_points[AP_2],
-                                orig_network_list_5g, orig_network_list_2g)
+            self.populate_bssid(
+                AP_2,
+                self.access_points[AP_2],
+                orig_network_list_5g,
+                orig_network_list_2g,
+            )
 
     def _kill_processes(self, ap, daemon):
-        """ Kill hostapd and dhcpd daemons
+        """Kill hostapd and dhcpd daemons
 
         Args:
             ap: AP to cleanup
@@ -806,17 +850,17 @@
         Returns: True/False if killing process is successful
         """
         self.log.info("Killing %s" % daemon)
-        pids = ap.ssh.run('pidof %s' % daemon, ignore_status=True)
+        pids = ap.ssh.run("pidof %s" % daemon, ignore_status=True)
         if pids.stdout:
-            ap.ssh.run('kill %s' % pids.stdout, ignore_status=True)
+            ap.ssh.run("kill %s" % pids.stdout, ignore_status=True)
         time.sleep(3)
-        pids = ap.ssh.run('pidof %s' % daemon, ignore_status=True)
+        pids = ap.ssh.run("pidof %s" % daemon, ignore_status=True)
         if pids.stdout:
             return False
         return True
 
     def _cleanup_hostapd_and_dhcpd(self, count):
-        """ Check if AP was cleaned up properly
+        """Check if AP was cleaned up properly
 
         Kill hostapd and dhcpd processes if cleanup was not successful in the
         last run
@@ -834,7 +878,7 @@
         phy_ifaces = ap.interfaces.get_physical_interface()
         kill_hostapd = False
         for iface in phy_ifaces:
-            if '2g_' in iface or '5g_' in iface or 'xg_' in iface:
+            if "2g_" in iface or "5g_" in iface or "xg_" in iface:
                 kill_hostapd = True
                 break
 
@@ -842,11 +886,12 @@
             return
 
         self.log.debug("Cleanup AP")
-        if not self._kill_processes(ap, 'hostapd') or \
-            not self._kill_processes(ap, 'dhcpd'):
+        if not self._kill_processes(ap, "hostapd") or not self._kill_processes(
+            ap, "dhcpd"
+        ):
             raise ("Failed to cleanup AP")
 
-        ap.__init__(self.user_params['AccessPoint'][count])
+        ap.__init__(self.user_params["AccessPoint"][count])
 
     def _generate_legacy_ap_config(self, network_list):
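+        """Build a hostapd configuration for a list of legacy network dicts.
+
+        Each network is mapped onto hostapd_bss_settings/hostapd_security
+        settings and the result is returned via
+        hostapd_ap_preset.create_ap_preset().
+        """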
         bss_settings = []
@@ -867,7 +912,10 @@
                         hidden=network["hiddenSSID"],
                         security=hostapd_security.Security(
                             security_mode=network["security"],
-                            password=network["password"])))
+                            password=network["password"],
+                        ),
+                    )
+                )
             elif "wepKeys" in network:
                 bss_settings.append(
                     hostapd_bss_settings.BssSettings(
@@ -876,7 +924,10 @@
                         hidden=network["hiddenSSID"],
                         security=hostapd_security.Security(
                             security_mode=network["security"],
-                            password=network["wepKeys"][0])))
+                            password=network["wepKeys"][0],
+                        ),
+                    )
+                )
             elif network["security"] == hostapd_constants.ENT_STRING:
                 bss_settings.append(
                     hostapd_bss_settings.BssSettings(
@@ -887,14 +938,18 @@
                             security_mode=network["security"],
                             radius_server_ip=network["radius_server_ip"],
                             radius_server_port=network["radius_server_port"],
-                            radius_server_secret=network[
-                                "radius_server_secret"])))
+                            radius_server_secret=network["radius_server_secret"],
+                        ),
+                    )
+                )
             else:
                 bss_settings.append(
                     hostapd_bss_settings.BssSettings(
                         name=network["SSID"],
                         ssid=network["SSID"],
-                        hidden=network["hiddenSSID"]))
+                        hidden=network["hiddenSSID"],
+                    )
+                )
         if "password" in hostapd_config_settings:
             config = hostapd_ap_preset.create_ap_preset(
                 iface_wlan_2g=wlan_2g,
@@ -904,8 +959,10 @@
                 hidden=hostapd_config_settings["hiddenSSID"],
                 security=hostapd_security.Security(
                     security_mode=hostapd_config_settings["security"],
-                    password=hostapd_config_settings["password"]),
-                bss_settings=bss_settings)
+                    password=hostapd_config_settings["password"],
+                ),
+                bss_settings=bss_settings,
+            )
         elif "wepKeys" in hostapd_config_settings:
             config = hostapd_ap_preset.create_ap_preset(
                 iface_wlan_2g=wlan_2g,
@@ -915,8 +972,10 @@
                 hidden=hostapd_config_settings["hiddenSSID"],
                 security=hostapd_security.Security(
                     security_mode=hostapd_config_settings["security"],
-                    password=hostapd_config_settings["wepKeys"][0]),
-                bss_settings=bss_settings)
+                    password=hostapd_config_settings["wepKeys"][0],
+                ),
+                bss_settings=bss_settings,
+            )
         else:
             config = hostapd_ap_preset.create_ap_preset(
                 iface_wlan_2g=wlan_2g,
@@ -924,13 +983,15 @@
                 channel=ap_settings["channel"],
                 ssid=hostapd_config_settings["SSID"],
                 hidden=hostapd_config_settings["hiddenSSID"],
-                bss_settings=bss_settings)
+                bss_settings=bss_settings,
+            )
         return config
 
     def configure_packet_capture(
-            self,
-            channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G):
+        self,
+        channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+        channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+    ):
         """Configure packet capture for 2G and 5G bands.
 
         Args:
@@ -939,21 +1000,25 @@
         """
         self.packet_capture = self.packet_capture[0]
         result = self.packet_capture.configure_monitor_mode(
-            hostapd_constants.BAND_2G, channel_2g)
+            hostapd_constants.BAND_2G, channel_2g
+        )
         if not result:
             raise ValueError("Failed to configure channel for 2G band")
 
         result = self.packet_capture.configure_monitor_mode(
-            hostapd_constants.BAND_5G, channel_5g)
+            hostapd_constants.BAND_5G, channel_5g
+        )
         if not result:
             raise ValueError("Failed to configure channel for 5G band.")
 
     @staticmethod
     def wifi_test_wrap(fn):
-
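+        """Decorator that re-runs a Wi-Fi test case.
+
+        The wrapped test may be re-run up to user_params["wifi_auto_rerun"]
+        times (default 3); before each retry the test is torn down and set up
+        again via _teardown_test/_setup_test.
+        """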
         def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "%s:%s:%s" % (self.__class__.__name__, self.test_name,
-                                    self.log_begin_time.replace(' ', '-'))
+            test_id = "%s:%s:%s" % (
+                self.__class__.__name__,
+                self.test_name,
+                self.log_begin_time.replace(" ", "-"),
+            )
             self.test_id = test_id
             self.result_detail = ""
             tries = int(self.user_params.get("wifi_auto_rerun", 3))
@@ -962,8 +1027,7 @@
             for i in range(tries + 1):
                 result = True
                 if i > 0:
-                    log_string = "[Test Case] RETRY:%s %s" % (i,
-                                                              self.test_name)
+                    log_string = "[Test Case] RETRY:%s %s" % (i, self.test_name)
                     self.log.info(log_string)
                     self._teardown_test(self.test_name)
                     self._setup_test(self.test_name)
diff --git a/src/antlion/test_utils/wifi/ota_chamber.py b/src/antlion/test_utils/wifi/ota_chamber.py
deleted file mode 100644
index 215c349..0000000
--- a/src/antlion/test_utils/wifi/ota_chamber.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import contextlib
-import io
-import serial
-import time
-from antlion import logger
-from antlion import utils
-
-SHORT_SLEEP = 1
-CHAMBER_SLEEP = 30
-
-
-def create(configs):
-    """Factory method for OTA chambers.
-
-    Args:
-        configs: list of dicts with chamber settings. settings must contain the
-        following: type (string denoting type of chamber)
-    """
-    objs = []
-    for config in configs:
-        try:
-            chamber_class = globals()[config['model']]
-        except KeyError:
-            raise KeyError('Invalid chamber configuration.')
-        objs.append(chamber_class(config))
-    return objs
-
-
-def detroy(objs):
-    return
-
-
-class OtaChamber(object):
-    """Base class implementation for OTA chamber.
-
-    Base class provides functions whose implementation is shared by all
-    chambers.
-    """
-    def reset_chamber(self):
-        """Resets the chamber to its zero/home state."""
-        raise NotImplementedError
-
-    def set_orientation(self, orientation):
-        """Set orientation for turn table in OTA chamber.
-
-        Args:
-            angle: desired turn table orientation in degrees
-        """
-        raise NotImplementedError
-
-    def set_stirrer_pos(self, stirrer_id, position):
-        """Starts turntables and stirrers in OTA chamber."""
-        raise NotImplementedError
-
-    def start_continuous_stirrers(self):
-        """Starts turntables and stirrers in OTA chamber."""
-        raise NotImplementedError
-
-    def stop_continuous_stirrers(self):
-        """Stops turntables and stirrers in OTA chamber."""
-        raise NotImplementedError
-
-    def step_stirrers(self, steps):
-        """Move stepped stirrers in OTA chamber to next step."""
-        raise NotImplementedError
-
-
-class MockChamber(OtaChamber):
-    """Class that implements mock chamber for test development and debug."""
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
-            self.device_id))
-        self.current_mode = None
-        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3', '6GHz']
-
-    def set_orientation(self, orientation):
-        self.log.info('Setting orientation to {} degrees.'.format(orientation))
-
-    def reset_chamber(self):
-        self.log.info('Resetting chamber to home state')
-
-    def set_stirrer_pos(self, stirrer_id, position):
-        """Starts turntables and stirrers in OTA chamber."""
-        self.log.info('Setting stirrer {} to {}.'.format(stirrer_id, position))
-
-    def start_continuous_stirrers(self):
-        """Starts turntables and stirrers in OTA chamber."""
-        self.log.info('Starting continuous stirrer motion')
-
-    def stop_continuous_stirrers(self):
-        """Stops turntables and stirrers in OTA chamber."""
-        self.log.info('Stopping continuous stirrer motion')
-
-    def configure_stepped_stirrers(self, steps):
-        """Programs parameters for stepped stirrers in OTA chamber."""
-        self.log.info('Configuring stepped stirrers')
-
-    def step_stirrers(self, steps):
-        """Move stepped stirrers in OTA chamber to next step."""
-        self.log.info('Moving stirrers to the next step')
-
-
-class OctoboxChamber(OtaChamber):
-    """Class that implements Octobox chamber."""
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
-            self.device_id))
-        self.TURNTABLE_FILE_PATH = '/usr/local/bin/fnPerformaxCmd'
-        utils.exe_cmd('sudo {} -d {} -i 0'.format(self.TURNTABLE_FILE_PATH,
-                                                  self.device_id))
-        self.current_mode = None
-        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3', '6GHz']
-
-    def set_orientation(self, orientation):
-        self.log.info('Setting orientation to {} degrees.'.format(orientation))
-        utils.exe_cmd('sudo {} -d {} -p {}'.format(self.TURNTABLE_FILE_PATH,
-                                                   self.device_id,
-                                                   orientation))
-
-    def reset_chamber(self):
-        self.log.info('Resetting chamber to home state')
-        self.set_orientation(0)
-
-
-class ChamberAutoConnect(object):
-    def __init__(self, chamber, chamber_config):
-        self._chamber = chamber
-        self._config = chamber_config
-
-    def __getattr__(self, item):
-        def chamber_call(*args, **kwargs):
-            self._chamber.connect(self._config['ip_address'],
-                                  self._config['username'],
-                                  self._config['password'])
-            return getattr(self._chamber, item)(*args, **kwargs)
-
-        return chamber_call
-
-
-class BluetestChamber(OtaChamber):
-    """Class that implements Octobox chamber."""
-    def __init__(self, config):
-        import flow
-        self.config = config.copy()
-        self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
-            self.config['ip_address']))
-        self.chamber = ChamberAutoConnect(flow.Flow(), self.config)
-        self.stirrer_ids = [0, 1, 2]
-        self.current_mode = None
-        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3']
-
-    # Capture print output decorator
-    @staticmethod
-    def _capture_output(func, *args, **kwargs):
-        """Creates a decorator to capture stdout from bluetest module"""
-        f = io.StringIO()
-        with contextlib.redirect_stdout(f):
-            func(*args, **kwargs)
-        output = f.getvalue()
-        return output
-
-    def _connect(self):
-        self.chamber.connect(self.config['ip_address'],
-                             self.config['username'], self.config['password'])
-
-    def _init_manual_mode(self):
-        self.current_mode = 'manual'
-        for stirrer_id in self.stirrer_ids:
-            out = self._capture_output(
-                self.chamber.chamber_stirring_manual_init, stirrer_id)
-            if "failed" in out:
-                self.log.warning("Initialization error: {}".format(out))
-        time.sleep(CHAMBER_SLEEP)
-
-    def _init_continuous_mode(self):
-        self.current_mode = 'continuous'
-        self.chamber.chamber_stirring_continuous_init()
-
-    def _init_stepped_mode(self, steps):
-        self.current_mode = 'stepped'
-        self.current_stepped_pos = 0
-        self.chamber.chamber_stirring_stepped_init(steps, False)
-
-    def set_stirrer_pos(self, stirrer_id, position):
-        if self.current_mode != 'manual':
-            self._init_manual_mode()
-        self.log.info('Setting stirrer {} to {}.'.format(stirrer_id, position))
-        out = self._capture_output(
-            self.chamber.chamber_stirring_manual_set_pos, stirrer_id, position)
-        if "failed" in out:
-            self.log.warning("Bluetest error: {}".format(out))
-            self.log.warning("Set position failed. Retrying.")
-            self.current_mode = None
-            self.set_stirrer_pos(stirrer_id, position)
-        else:
-            self._capture_output(self.chamber.chamber_stirring_manual_wait,
-                                 CHAMBER_SLEEP)
-            self.log.warning('Stirrer {} at {}.'.format(stirrer_id, position))
-
-    def set_orientation(self, orientation):
-        self.set_stirrer_pos(2, orientation * 100 / 360)
-
-    def start_continuous_stirrers(self):
-        if self.current_mode != 'continuous':
-            self._init_continuous_mode()
-        self.chamber.chamber_stirring_continuous_start()
-
-    def stop_continuous_stirrers(self):
-        self.chamber.chamber_stirring_continuous_stop()
-
-    def step_stirrers(self, steps):
-        if self.current_mode != 'stepped':
-            self._init_stepped_mode(steps)
-        if self.current_stepped_pos == 0:
-            self.current_stepped_pos += 1
-            return
-        self.current_stepped_pos += 1
-        self.chamber.chamber_stirring_stepped_next_pos()
-
-    def reset_chamber(self):
-        if self.current_mode == 'continuous':
-            self._init_continuous_mode()
-            time.sleep(SHORT_SLEEP)
-            self._init_continuous_mode()
-        else:
-            self._init_manual_mode()
-
-
-class EInstrumentChamber(OtaChamber):
-    """Class that implements Einstrument Chamber."""
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger(
-            'EInstrumentChamber|{}'.format(self.device_id))
-        self.current_mode = None
-        self.ser = self._get_serial(config['port'])
-
-    def _get_serial(self, port, baud=9600):
-        """Read com port.
-
-        Args:
-            port: turn table com port
-            baud: baud rate
-        """
-        ser = serial.Serial(port, baud)
-        return ser
-
-    def set_orientation(self, orientation):
-        if int(orientation) > 360:
-            orientation = int(orientation) % 360
-        elif int(orientation) < 0:
-            orientation = 0
-        self.log.info('Setting orientation to {} degrees.'.format(orientation))
-        orientation = str('DG') + str(orientation) + str(';')
-        self.ser.write(orientation.encode())
-        return orientation
-
-    def reset_chamber(self):
-        self.log.info('Resetting turn table to zero degree')
-        self.set_orientation(0)
diff --git a/src/antlion/test_utils/wifi/ota_sniffer.py b/src/antlion/test_utils/wifi/ota_sniffer.py
deleted file mode 100644
index 4200d24..0000000
--- a/src/antlion/test_utils/wifi/ota_sniffer.py
+++ /dev/null
@@ -1,601 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import csv
-import os
-import posixpath
-import time
-import antlion.test_utils.wifi.wifi_test_utils as wutils
-
-from antlion import context
-from antlion import logger
-from antlion import utils
-from antlion.controllers.utils_lib import ssh
-
-WifiEnums = wutils.WifiEnums
-SNIFFER_TIMEOUT = 6
-
-
-def create(configs):
-    """Factory method for sniffer.
-    Args:
-        configs: list of dicts with sniffer settings.
-        Settings must contain the following : ssh_settings, type, OS, interface.
-
-    Returns:
-        objs: list of sniffer class objects.
-    """
-    objs = []
-    for config in configs:
-        try:
-            if config['type'] == 'tshark':
-                if config['os'] == 'unix':
-                    objs.append(TsharkSnifferOnUnix(config))
-                elif config['os'] == 'linux':
-                    objs.append(TsharkSnifferOnLinux(config))
-                else:
-                    raise RuntimeError('Wrong sniffer config')
-
-            elif config['type'] == 'mock':
-                objs.append(MockSniffer(config))
-        except KeyError:
-            raise KeyError('Invalid sniffer configurations')
-        return objs
-
-
-def destroy(objs):
-    return
-
-
-class OtaSnifferBase(object):
-    """Base class defining common sniffers functions."""
-
-    _log_file_counter = 0
-
-    @property
-    def started(self):
-        raise NotImplementedError('started must be specified.')
-
-    def start_capture(self, network, duration=30):
-        """Starts the sniffer Capture.
-
-        Args:
-            network: dict containing network information such as SSID, etc.
-            duration: duration of sniffer capture in seconds.
-        """
-        raise NotImplementedError('start_capture must be specified.')
-
-    def stop_capture(self, tag=''):
-        """Stops the sniffer Capture.
-
-        Args:
-            tag: string to tag sniffer capture file name with.
-        """
-        raise NotImplementedError('stop_capture must be specified.')
-
-    def _get_remote_dump_path(self):
-        """Returns name of the sniffer dump file."""
-        remote_file_name = 'sniffer_dump.{}'.format(
-            self.sniffer_output_file_type)
-        remote_dump_path = posixpath.join(posixpath.sep, 'tmp',
-                                          remote_file_name)
-        return remote_dump_path
-
-    def _get_full_file_path(self, tag=None):
-        """Returns the full file path for the sniffer capture dump file.
-
-        Returns the full file path (on test machine) for the sniffer capture
-        dump file.
-
-        Args:
-            tag: The tag appended to the sniffer capture dump file .
-        """
-        tags = [tag, 'count', OtaSnifferBase._log_file_counter]
-        out_file_name = 'Sniffer_Capture_%s.%s' % ('_'.join([
-            str(x) for x in tags if x != '' and x is not None
-        ]), self.sniffer_output_file_type)
-        OtaSnifferBase._log_file_counter += 1
-
-        file_path = os.path.join(self.log_path, out_file_name)
-        return file_path
-
-    @property
-    def log_path(self):
-        current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'sniffer_captures')
-
-        # Ensure the directory exists.
-        os.makedirs(full_out_dir, exist_ok=True)
-
-        return full_out_dir
-
-
-class MockSniffer(OtaSnifferBase):
-    """Class that implements mock sniffer for test development and debug."""
-    def __init__(self, config):
-        self.log = logger.create_tagged_trace_logger('Mock Sniffer')
-
-    def start_capture(self, network, duration=30):
-        """Starts sniffer capture on the specified machine.
-
-        Args:
-            network: dict of network credentials.
-            duration: duration of the sniff.
-        """
-        self.log.debug('Starting sniffer.')
-
-    def stop_capture(self):
-        """Stops the sniffer.
-
-        Returns:
-            log_file: name of processed sniffer.
-        """
-
-        self.log.debug('Stopping sniffer.')
-        log_file = self._get_full_file_path()
-        with open(log_file, 'w') as file:
-            file.write('this is a sniffer dump.')
-        return log_file
-
-
-class TsharkSnifferBase(OtaSnifferBase):
-    """Class that implements Tshark based sniffer controller. """
-
-    TYPE_SUBTYPE_DICT = {
-        '0': 'Association Requests',
-        '1': 'Association Responses',
-        '2': 'Reassociation Requests',
-        '3': 'Resssociation Responses',
-        '4': 'Probe Requests',
-        '5': 'Probe Responses',
-        '8': 'Beacon',
-        '9': 'ATIM',
-        '10': 'Disassociations',
-        '11': 'Authentications',
-        '12': 'Deauthentications',
-        '13': 'Actions',
-        '24': 'Block ACK Requests',
-        '25': 'Block ACKs',
-        '26': 'PS-Polls',
-        '27': 'RTS',
-        '28': 'CTS',
-        '29': 'ACK',
-        '30': 'CF-Ends',
-        '31': 'CF-Ends/CF-Acks',
-        '32': 'Data',
-        '33': 'Data+CF-Ack',
-        '34': 'Data+CF-Poll',
-        '35': 'Data+CF-Ack+CF-Poll',
-        '36': 'Null',
-        '37': 'CF-Ack',
-        '38': 'CF-Poll',
-        '39': 'CF-Ack+CF-Poll',
-        '40': 'QoS Data',
-        '41': 'QoS Data+CF-Ack',
-        '42': 'QoS Data+CF-Poll',
-        '43': 'QoS Data+CF-Ack+CF-Poll',
-        '44': 'QoS Null',
-        '46': 'QoS CF-Poll (Null)',
-        '47': 'QoS CF-Ack+CF-Poll (Null)'
-    }
-
-    TSHARK_COLUMNS = [
-        'frame_number', 'frame_time_relative', 'mactime', 'frame_len', 'rssi',
-        'channel', 'ta', 'ra', 'bssid', 'type', 'subtype', 'duration', 'seq',
-        'retry', 'pwrmgmt', 'moredata', 'ds', 'phy', 'radio_datarate',
-        'vht_datarate', 'radiotap_mcs_index', 'vht_mcs', 'wlan_data_rate',
-        '11n_mcs_index', '11ac_mcs', '11n_bw', '11ac_bw', 'vht_nss', 'mcs_gi',
-        'vht_gi', 'vht_coding', 'ba_bm', 'fc_status', 'bf_report'
-    ]
-
-    TSHARK_OUTPUT_COLUMNS = [
-        'frame_number', 'frame_time_relative', 'mactime', 'ta', 'ra', 'bssid',
-        'rssi', 'channel', 'frame_len', 'Info', 'radio_datarate',
-        'radiotap_mcs_index', 'pwrmgmt', 'phy', 'vht_nss', 'vht_mcs',
-        'vht_datarate', '11ac_mcs', '11ac_bw', 'vht_gi', 'vht_coding',
-        'wlan_data_rate', '11n_mcs_index', '11n_bw', 'mcs_gi', 'type',
-        'subtype', 'duration', 'seq', 'retry', 'moredata', 'ds', 'ba_bm',
-        'fc_status', 'bf_report'
-    ]
-
-    TSHARK_FIELDS_LIST = [
-        'frame.number', 'frame.time_relative', 'radiotap.mactime', 'frame.len',
-        'radiotap.dbm_antsignal', 'wlan_radio.channel', 'wlan.ta', 'wlan.ra',
-        'wlan.bssid', 'wlan.fc.type', 'wlan.fc.type_subtype', 'wlan.duration',
-        'wlan.seq', 'wlan.fc.retry', 'wlan.fc.pwrmgt', 'wlan.fc.moredata',
-        'wlan.fc.ds', 'wlan_radio.phy', 'radiotap.datarate',
-        'radiotap.vht.datarate.0', 'radiotap.mcs.index', 'radiotap.vht.mcs.0',
-        'wlan_radio.data_rate', 'wlan_radio.11n.mcs_index',
-        'wlan_radio.11ac.mcs', 'wlan_radio.11n.bandwidth',
-        'wlan_radio.11ac.bandwidth', 'radiotap.vht.nss.0', 'radiotap.mcs.gi',
-        'radiotap.vht.gi', 'radiotap.vht.coding.0', 'wlan.ba.bm',
-        'wlan.fcs.status', 'wlan.vht.compressed_beamforming_report.snr'
-    ]
-
-    def __init__(self, config):
-        self.sniffer_proc_pid = None
-        self.log = logger.create_tagged_trace_logger('Tshark Sniffer')
-        self.ssh_config = config['ssh_config']
-        self.sniffer_os = config['os']
-        self.run_as_sudo = config.get('run_as_sudo', False)
-        self.sniffer_output_file_type = config['output_file_type']
-        self.sniffer_snap_length = config['snap_length']
-        self.sniffer_interface = config['interface']
-        self.sniffer_disabled = False
-
-        #Logging into sniffer
-        self.log.info('Logging into sniffer.')
-        self._sniffer_server = ssh.connection.SshConnection(
-            ssh.settings.from_config(self.ssh_config))
-        # Get tshark params
-        self.tshark_fields = self._generate_tshark_fields(
-            self.TSHARK_FIELDS_LIST)
-        self.tshark_path = self._sniffer_server.run('which tshark').stdout
-
-    @property
-    def _started(self):
-        return self.sniffer_proc_pid is not None
-
-    def _scan_for_networks(self):
-        """Scans for wireless networks on the sniffer."""
-        raise NotImplementedError
-
-    def _get_tshark_command(self, duration):
-        """Frames the appropriate tshark command.
-
-        Args:
-            duration: duration to sniff for.
-
-        Returns:
-            tshark_command : appropriate tshark command.
-        """
-        tshark_command = '{} -l -i {} -I -t u -a duration:{}'.format(
-            self.tshark_path, self.sniffer_interface, int(duration))
-        if self.run_as_sudo:
-            tshark_command = 'sudo {}'.format(tshark_command)
-
-        return tshark_command
-
-    def _get_sniffer_command(self, tshark_command):
-        """
-        Frames the appropriate sniffer command.
-
-        Args:
-            tshark_command: framed tshark command
-
-        Returns:
-            sniffer_command: appropriate sniffer command
-        """
-        if self.sniffer_output_file_type in ['pcap', 'pcapng']:
-            sniffer_command = ' {tshark} -s {snaplength} -w {log_file} '.format(
-                tshark=tshark_command,
-                snaplength=self.sniffer_snap_length,
-                log_file=self._get_remote_dump_path())
-
-        elif self.sniffer_output_file_type == 'csv':
-            sniffer_command = '{tshark} {fields} > {log_file}'.format(
-                tshark=tshark_command,
-                fields=self.tshark_fields,
-                log_file=self._get_remote_dump_path())
-
-        else:
-            raise KeyError('Sniffer output file type not configured correctly')
-
-        return sniffer_command
-
-    def _generate_tshark_fields(self, fields):
-        """Generates tshark fields to be appended to the tshark command.
-
-        Args:
-            fields: list of tshark fields to be appended to the tshark command.
-
-        Returns:
-            tshark_fields: string of tshark fields to be appended
-            to the tshark command.
-        """
-        tshark_fields = "-T fields -y IEEE802_11_RADIO -E separator='^'"
-        for field in fields:
-            tshark_fields = tshark_fields + ' -e {}'.format(field)
-        return tshark_fields
-
-    def _configure_sniffer(self, network, chan, bw):
-        """ Connects to a wireless network using networksetup utility.
-
-        Args:
-            network: dictionary of network credentials; SSID and password.
-        """
-        raise NotImplementedError
-
-    def _run_tshark(self, sniffer_command):
-        """Starts the sniffer.
-
-        Args:
-            sniffer_command: sniffer command to execute.
-        """
-        self.log.debug('Starting sniffer.')
-        sniffer_job = self._sniffer_server.run_async(sniffer_command)
-        self.sniffer_proc_pid = sniffer_job.stdout
-
-    def _stop_tshark(self):
-        """ Stops the sniffer."""
-        self.log.debug('Stopping sniffer')
-
-        # while loop to kill the sniffer process
-        stop_time = time.time() + SNIFFER_TIMEOUT
-        while time.time() < stop_time:
-            # Wait before sending more kill signals
-            time.sleep(0.1)
-            try:
-                # Returns 1 if process was killed
-                self._sniffer_server.run(
-                    'ps aux| grep {} | grep -v grep'.format(
-                        self.sniffer_proc_pid))
-            except:
-                return
-            try:
-                # Returns error if process was killed already
-                self._sniffer_server.run('sudo kill -15 {}'.format(
-                    str(self.sniffer_proc_pid)))
-            except:
-                # Except is hit when tshark is already dead but we will break
-                # out of the loop when confirming process is dead using ps aux
-                pass
-        self.log.warning('Could not stop sniffer. Trying with SIGKILL.')
-        try:
-            self.log.debug('Killing sniffer with SIGKILL.')
-            self._sniffer_server.run('sudo kill -9 {}'.format(
-                str(self.sniffer_proc_pid)))
-        except:
-            self.log.debug('Sniffer process may have stopped succesfully.')
-
-    def _process_tshark_dump(self, log_file):
-        """ Process tshark dump for better readability.
-
-        Processes tshark dump for better readability and saves it to a file.
-        Adds an info column at the end of each row. Format of the info columns:
-        subtype of the frame, sequence no and retry status.
-
-        Args:
-            log_file : unprocessed sniffer output
-        Returns:
-            log_file : processed sniffer output
-        """
-        temp_dump_file = os.path.join(self.log_path, 'sniffer_temp_dump.csv')
-        utils.exe_cmd('cp {} {}'.format(log_file, temp_dump_file))
-
-        with open(temp_dump_file, 'r') as input_csv, open(log_file,
-                                                          'w') as output_csv:
-            reader = csv.DictReader(input_csv,
-                                    fieldnames=self.TSHARK_COLUMNS,
-                                    delimiter='^')
-            writer = csv.DictWriter(output_csv,
-                                    fieldnames=self.TSHARK_OUTPUT_COLUMNS,
-                                    delimiter='\t')
-            writer.writeheader()
-            for row in reader:
-                if row['subtype'] in self.TYPE_SUBTYPE_DICT:
-                    row['Info'] = '{sub} S={seq} retry={retry_status}'.format(
-                        sub=self.TYPE_SUBTYPE_DICT[row['subtype']],
-                        seq=row['seq'],
-                        retry_status=row['retry'])
-                else:
-                    row['Info'] = '{} S={} retry={}\n'.format(
-                        row['subtype'], row['seq'], row['retry'])
-                writer.writerow(row)
-
-        utils.exe_cmd('rm -f {}'.format(temp_dump_file))
-        return log_file
-
-    def start_capture(self, network, chan, bw, duration=60):
-        """Starts sniffer capture on the specified machine.
-
-        Args:
-            network: dict describing network to sniff on.
-            duration: duration of sniff.
-        """
-        # Checking for existing sniffer processes
-        if self._started:
-            self.log.debug('Sniffer already running')
-            return
-
-        # Configure sniffer
-        self._configure_sniffer(network, chan, bw)
-        tshark_command = self._get_tshark_command(duration)
-        sniffer_command = self._get_sniffer_command(tshark_command)
-
-        # Starting sniffer capture by executing tshark command
-        self._run_tshark(sniffer_command)
-
-    def stop_capture(self, tag=''):
-        """Stops the sniffer.
-
-        Args:
-            tag: tag to be appended to the sniffer output file.
-        Returns:
-            log_file: path to sniffer dump.
-        """
-        # Checking if there is an ongoing sniffer capture
-        if not self._started:
-            self.log.debug('No sniffer process running')
-            return
-        # Killing sniffer process
-        self._stop_tshark()
-
-        # Processing writing capture output to file
-        log_file = self._get_full_file_path(tag)
-        self._sniffer_server.run('sudo chmod 777 {}'.format(
-            self._get_remote_dump_path()))
-        self._sniffer_server.pull_file(log_file, self._get_remote_dump_path())
-
-        if self.sniffer_output_file_type == 'csv':
-            log_file = self._process_tshark_dump(log_file)
-
-        self.sniffer_proc_pid = None
-        return log_file
-
-
-class TsharkSnifferOnUnix(TsharkSnifferBase):
-    """Class that implements Tshark based sniffer controller on Unix systems."""
-    def _scan_for_networks(self):
-        """Scans the wireless networks on the sniffer.
-
-        Returns:
-            scan_results : output of the scan command.
-        """
-        scan_command = '/usr/local/bin/airport -s'
-        scan_result = self._sniffer_server.run(scan_command).stdout
-
-        return scan_result
-
-    def _configure_sniffer(self, network, chan, bw):
-        """Connects to a wireless network using networksetup utility.
-
-        Args:
-            network: dictionary of network credentials; SSID and password.
-        """
-
-        self.log.debug('Connecting to network {}'.format(network['SSID']))
-
-        if 'password' not in network:
-            network['password'] = ''
-
-        connect_command = 'networksetup -setairportnetwork en0 {} {}'.format(
-            network['SSID'], network['password'])
-        self._sniffer_server.run(connect_command)
-
-
-class TsharkSnifferOnLinux(TsharkSnifferBase):
-    """Class that implements Tshark based sniffer controller on Linux."""
-    def __init__(self, config):
-        super().__init__(config)
-        self._init_sniffer()
-        self.channel = None
-        self.bandwidth = None
-
-    def _init_sniffer(self):
-        """Function to configure interface for the first time"""
-        self._sniffer_server.run('sudo modprobe -r iwlwifi')
-        self._sniffer_server.run('sudo dmesg -C')
-        self._sniffer_server.run('cat /dev/null | sudo tee /var/log/syslog')
-        self._sniffer_server.run('sudo modprobe iwlwifi debug=0x1')
-        # Wait for wifi config changes to settle before attempting further
-        # configuration, e.g. setting monitor mode (which will fail otherwise)
-        time.sleep(1)
-
-    def start_capture(self, network, chan, bw, duration=60):
-        """Starts sniffer capture on the specified machine.
-
-        Args:
-            network: dict describing network to sniff on.
-            chan: primary channel (int) to sniff on.
-            bw: bandwidth (int) to tune the sniffer to.
-            duration: duration of the sniff.
-        """
-        # If the sniffer doesn't support the channel, return
-        if '6g' in str(chan):
-            self.log.debug('Channel not supported on sniffer')
-            return
-        # Checking for existing sniffer processes
-        if self._started:
-            self.log.debug('Sniffer already running')
-            return
-
-        # Configure sniffer
-        self._configure_sniffer(network, chan, bw)
-        tshark_command = self._get_tshark_command(duration)
-        sniffer_command = self._get_sniffer_command(tshark_command)
-
-        # Starting sniffer capture by executing tshark command
-        self._run_tshark(sniffer_command)
-
-    def set_monitor_mode(self, chan, bw):
-        """Function to configure interface to monitor mode
-
-        Brings up the sniffer wireless interface in monitor mode and
-        tunes it to the appropriate channel and bandwidth
-
-        Args:
-            chan: primary channel (int) to tune the sniffer to
-            bw: bandwidth (int) to tune the sniffer to
-        """
-        if chan == self.channel and bw == self.bandwidth:
-            return
-
-        self.channel = chan
-        self.bandwidth = bw
-
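-        # Map of bandwidth -> {group of primary channels: center channel},
-        # used below to find the center frequency passed to `iw dev set freq`.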
-        channel_map = {
-            80: {
-                tuple(range(36, 50, 2)): 42,
-                tuple(range(52, 66, 2)): 58,
-                tuple(range(100, 114, 2)): 106,
-                tuple(range(116, 130, 2)): 122,
-                tuple(range(132, 146, 2)): 138,
-                tuple(range(149, 163, 2)): 155
-            },
-            40: {
-                (36, 38, 40): 38,
-                (44, 46, 48): 46,
-                (52, 54, 56): 54,
-                (60, 62, 64): 62,
-                (100, 102, 104): 102,
-                (108, 110, 112): 108,
-                (116, 118, 120): 118,
-                (124, 126, 128): 126,
-                (132, 134, 136): 134,
-                (140, 142, 144): 142,
-                (149, 151, 153): 151,
-                (157, 159, 161): 159
-            },
-            160: {
-                (36, 38, 40): 50
-            }
-        }
-
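-        # Channels 1-13 are treated as 2.4 GHz; anything higher as 5 GHz.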
-        if chan <= 13:
-            primary_freq = WifiEnums.channel_2G_to_freq[chan]
-        else:
-            primary_freq = WifiEnums.channel_5G_to_freq[chan]
-
-        self._sniffer_server.run('sudo ifconfig {} down'.format(
-            self.sniffer_interface))
-        self._sniffer_server.run('sudo iwconfig {} mode monitor'.format(
-            self.sniffer_interface))
-        self._sniffer_server.run('sudo ifconfig {} up'.format(
-            self.sniffer_interface))
-
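-        # For bandwidths with an entry in channel_map, pass the bandwidth and
-        # the center frequency of the containing block to `iw`; otherwise tune
-        # to the primary frequency only.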
-        if bw in channel_map:
-            for tuple_chan in channel_map[bw]:
-                if chan in tuple_chan:
-                    center_freq = WifiEnums.channel_5G_to_freq[channel_map[bw]
-                                                               [tuple_chan]]
-                    self._sniffer_server.run(
-                        'sudo iw dev {} set freq {} {} {}'.format(
-                            self.sniffer_interface, primary_freq, bw,
-                            center_freq))
-
-        else:
-            self._sniffer_server.run('sudo iw dev {} set freq {}'.format(
-                self.sniffer_interface, primary_freq))
-
-    def _configure_sniffer(self, network, chan, bw):
-        """ Connects to a wireless network using networksetup utility.
-
-        Args:
-            network: dictionary of network credentials; SSID and password.
-        """
-
-        self.log.debug('Setting monitor mode on Ch {}, bw {}'.format(chan, bw))
-        self.set_monitor_mode(chan, bw)
diff --git a/src/antlion/test_utils/wifi/p2p/WifiP2pBaseTest.py b/src/antlion/test_utils/wifi/p2p/WifiP2pBaseTest.py
deleted file mode 100644
index 5fb8081..0000000
--- a/src/antlion/test_utils/wifi/p2p/WifiP2pBaseTest.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.utils
-import os
-import re
-import time
-
-from antlion import asserts
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.p2p import wifi_p2p_const as p2pconsts
-
-WAIT_TIME = 60
-
-
-class WifiP2pBaseTest(BaseTestClass):
-    def __init__(self, controllers):
-        if not hasattr(self, 'android_devices'):
-            super(WifiP2pBaseTest, self).__init__(controllers)
-
-    def setup_class(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        required_params = ()
-        optional_params = ("skip_read_factory_mac", "pixel_models", "cnss_diag_file")
-        self.unpack_userparams(required_params,
-                               optional_params,
-                               skip_read_factory_mac=0)
-
-        self.dut1 = self.android_devices[0]
-        self.dut2 = self.android_devices[1]
-        if self.skip_read_factory_mac:
-            self.dut1_mac = None
-            self.dut2_mac = None
-        else:
-            self.dut1_mac = self.get_p2p_mac_address(self.dut1)
-            self.dut2_mac = self.get_p2p_mac_address(self.dut2)
-
-        # Init location before initializing p2p
-        antlion.utils.set_location_service(self.dut1, True)
-        antlion.utils.set_location_service(self.dut2, True)
-
-        wutils.wifi_test_device_init(self.dut1)
-        utils.sync_device_time(self.dut1)
-        self.dut1.droid.wifiP2pInitialize()
-        time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-        asserts.assert_true(self.dut1.droid.wifiP2pIsEnabled(),
-                            "DUT1's p2p should be initialized but it didn't")
-        self.dut1.name = "Android_" + self.dut1.serial
-        self.dut1.droid.wifiP2pSetDeviceName(self.dut1.name)
-        wutils.wifi_test_device_init(self.dut2)
-        utils.sync_device_time(self.dut2)
-        self.dut2.droid.wifiP2pInitialize()
-        time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-        asserts.assert_true(self.dut2.droid.wifiP2pIsEnabled(),
-                            "DUT2's p2p should be initialized but it didn't")
-        self.dut2.name = "Android_" + self.dut2.serial
-        self.dut2.droid.wifiP2pSetDeviceName(self.dut2.name)
-
-        if len(self.android_devices) > 2:
-            self.dut3 = self.android_devices[2]
-            antlion.utils.set_location_service(self.dut3, True)
-            wutils.wifi_test_device_init(self.dut3)
-            utils.sync_device_time(self.dut3)
-            self.dut3.droid.wifiP2pInitialize()
-            time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-            asserts.assert_true(
-                self.dut3.droid.wifiP2pIsEnabled(),
-                "DUT3's p2p should be initialized but it didn't")
-            self.dut3.name = "Android_" + self.dut3.serial
-            self.dut3.droid.wifiP2pSetDeviceName(self.dut3.name)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-
-    def teardown_class(self):
-        self.dut1.droid.wifiP2pClose()
-        self.dut2.droid.wifiP2pClose()
-        antlion.utils.set_location_service(self.dut1, False)
-        antlion.utils.set_location_service(self.dut2, False)
-
-        if len(self.android_devices) > 2:
-            self.dut3.droid.wifiP2pClose()
-            antlion.utils.set_location_service(self.dut3, False)
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-
-    def setup_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-
-        for ad in self.android_devices:
-            ad.ed.clear_all_events()
-
-    def teardown_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(
-                    proc[0], proc[1], self.test_name, pull_dump=False)
-        self.tcpdump_proc = []
-        for ad in self.android_devices:
-            # Clear p2p group info
-            ad.droid.wifiP2pRequestPersistentGroupInfo()
-            event = ad.ed.pop_event("WifiP2pOnPersistentGroupInfoAvailable",
-                                    p2pconsts.DEFAULT_TIMEOUT)
-            for network in event['data']:
-                ad.droid.wifiP2pDeletePersistentGroup(network['NetworkId'])
-            # Clear p2p local service
-            ad.droid.wifiP2pClearLocalServices()
-
-    def on_fail(self, test_name, begin_time):
-        for ad in self.android_devices:
-            ad.take_bug_report(test_name, begin_time)
-            ad.cat_adb_log(test_name, begin_time)
-            wutils.get_ssrdumps(ad)
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for ad in self.android_devices:
-                wutils.get_cnss_diag_log(ad)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-        self.tcpdump_proc = []
-
-    def get_p2p_mac_address(self, dut):
-        """Gets the current MAC address being used for Wi-Fi Direct."""
-        dut.reboot()
-        time.sleep(WAIT_TIME)
-        out = dut.adb.shell("ifconfig p2p0")
-        return re.match(r".* HWaddr (\S+).*", out, re.S).group(1)
diff --git a/src/antlion/test_utils/wifi/p2p/__init__.py b/src/antlion/test_utils/wifi/p2p/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/wifi/p2p/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/wifi/p2p/wifi_p2p_const.py b/src/antlion/test_utils/wifi/p2p/wifi_p2p_const.py
deleted file mode 100644
index eb19776..0000000
--- a/src/antlion/test_utils/wifi/p2p/wifi_p2p_const.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# Wifi P2p framework designed value
-######################################################
-P2P_FIND_TIMEOUT = 120
-GO_IP_ADDRESS = '192.168.49.1'
-
-######################################################
-# Wifi P2p Acts flow control timer value
-######################################################
-
-DEFAULT_TIMEOUT = 30
-DEFAULT_CONNECT_SLEEPTIME = 3
-DEFAULT_POLLING_SLEEPTIME = 1
-DEFAULT_SLEEPTIME = 5
-DEFAULT_FUNCTION_SWITCH_TIME = 10
-DEFAULT_SERVICE_WAITING_TIME = 20
-DEFAULT_GROUP_CLIENT_LOST_TIME = 60
-
-P2P_CONNECT_NEGOTIATION = 0
-P2P_CONNECT_JOIN = 1
-P2P_CONNECT_INVITATION = 2
-######################################################
-# Wifi P2p sl4a Event String
-######################################################
-CONNECTED_EVENT = "WifiP2pConnected"
-DISCONNECTED_EVENT = "WifiP2pDisconnected"
-PEER_AVAILABLE_EVENT = "WifiP2pOnPeersAvailable"
-CONNECTION_INFO_AVAILABLE_EVENT = "WifiP2pOnConnectionInfoAvailable"
-ONGOING_PEER_INFO_AVAILABLE_EVENT = "WifiP2pOnOngoingPeerAvailable"
-ONGOING_PEER_SET_SUCCESS_EVENT = "WifiP2psetP2pPeerConfigureOnSuccess"
-CONNECT_SUCCESS_EVENT = "WifiP2pConnectOnSuccess"
-CREATE_GROUP_SUCCESS_EVENT = "WifiP2pCreateGroupOnSuccess"
-SET_CHANNEL_SUCCESS_EVENT = "WifiP2pSetChannelsOnSuccess"
-GROUP_INFO_AVAILABLE_EVENT = "WifiP2pOnGroupInfoAvailable"
-
-######################################################
-# Wifi P2p local service event
-####################################################
-
-DNSSD_EVENT = "WifiP2pOnDnsSdServiceAvailable"
-DNSSD_TXRECORD_EVENT = "WifiP2pOnDnsSdTxtRecordAvailable"
-UPNP_EVENT = "WifiP2pOnUpnpServiceAvailable"
-
-DNSSD_EVENT_INSTANCENAME_KEY = "InstanceName"
-DNSSD_EVENT_REGISTRATIONTYPE_KEY = "RegistrationType"
-DNSSD_TXRECORD_EVENT_FULLDOMAINNAME_KEY = "FullDomainName"
-DNSSD_TXRECORD_EVENT_TXRECORDMAP_KEY = "TxtRecordMap"
-UPNP_EVENT_SERVICELIST_KEY = "ServiceList"
-
-######################################################
-# Wifi P2p local service type
-####################################################
-P2P_LOCAL_SERVICE_UPNP = 0
-P2P_LOCAL_SERVICE_IPP = 1
-P2P_LOCAL_SERVICE_AFP = 2
-
-######################################################
-# Wifi P2p group capability
-######################################################
-P2P_GROUP_CAPAB_GROUP_OWNER = 1
-
-
-######################################################
-# Wifi P2p UPnP MediaRenderer local service
-######################################################
-class UpnpTestData():
-    AVTransport = "urn:schemas-upnp-org:service:AVTransport:1"
-    ConnectionManager = "urn:schemas-upnp-org:service:ConnectionManager:1"
-    serviceType = "urn:schemas-upnp-org:device:MediaRenderer:1"
-    uuid = "6859dede-8574-59ab-9332-123456789011"
-    rootdevice = "upnp:rootdevice"
-
-
-######################################################
-# Wifi P2p Bonjour IPP & AFP local service
-######################################################
-class IppTestData():
-    ippInstanceName = "MyPrinter"
-    ippRegistrationType = "_ipp._tcp"
-    ippDomainName = "myprinter._ipp._tcp.local."
-    ipp_txtRecord = {"txtvers": "1", "pdl": "application/postscript"}
-
-
-class AfpTestData():
-    afpInstanceName = "Example"
-    afpRegistrationType = "_afpovertcp._tcp"
-    afpDomainName = "example._afpovertcp._tcp.local."
-    afp_txtRecord = {}
diff --git a/src/antlion/test_utils/wifi/p2p/wifi_p2p_test_utils.py b/src/antlion/test_utils/wifi/p2p/wifi_p2p_test_utils.py
deleted file mode 100755
index f9144b5..0000000
--- a/src/antlion/test_utils/wifi/p2p/wifi_p2p_test_utils.py
+++ /dev/null
@@ -1,739 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import queue
-import time
-
-from antlion import asserts
-from antlion import utils
-from antlion.test_utils.wifi.p2p import wifi_p2p_const as p2pconsts
-import antlion.utils
-
-
-def is_discovered(event, ad):
-    """Check an Android device exist in WifiP2pOnPeersAvailable event or not.
-
-    Args:
-        event: WifiP2pOnPeersAvailable which include all of p2p devices.
-        ad: The android device
-    Returns:
-        True: if an Android device exist in p2p list
-        False: if not exist
-    """
-    for device in event['data']['Peers']:
-        if device['Name'] == ad.name:
-            ad.deviceAddress = device['Address']
-            return True
-    return False
-
-
-def check_disconnect(ad, timeout=p2pconsts.DEFAULT_TIMEOUT):
-    """Check an Android device disconnect or not
-
-    Args:
-        ad: The android device
-    """
-    ad.droid.wifiP2pRequestConnectionInfo()
-    # wait for the disconnect event
-    ad.ed.pop_event(p2pconsts.DISCONNECTED_EVENT, timeout)
-
-
-def p2p_disconnect(ad):
-    """Invoke an Android device removeGroup to trigger p2p disconnect
-
-    Args:
-        ad: The android device
-    """
-    ad.log.debug("Disconnect")
-    ad.droid.wifiP2pRemoveGroup()
-    check_disconnect(ad)
-
-
-def p2p_connection_ping_test(ad, target_ip_address):
-    """Let an Android device to start ping target_ip_address
-
-    Args:
-        ad: The android device
-        target_ip_address: ip address which would like to ping
-    """
-    ad.log.debug("Run Ping Test, %s ping %s " % (ad.serial, target_ip_address))
-    asserts.assert_true(
-        antlion.utils.adb_shell_ping(ad,
-                                  count=6,
-                                  dest_ip=target_ip_address,
-                                  timeout=20), "%s ping failed" % (ad.serial))
-
-
-def is_go(ad):
-    """Check an Android p2p role is Go or not
-
-    Args:
-        ad: The android device
-    Return:
-        True: An Android device is p2p  go
-        False: An Android device is p2p gc
-    """
-    ad.log.debug("is go check")
-    ad.droid.wifiP2pRequestConnectionInfo()
-    ad_connect_info_event = ad.ed.pop_event(
-        p2pconsts.CONNECTION_INFO_AVAILABLE_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    if ad_connect_info_event['data']['isGroupOwner']:
-        return True
-    return False
-
-
-def p2p_go_ip(ad):
-    """Get GO IP address
-
-    Args:
-        ad: The android device
-    Return:
-        GO IP address
-    """
-    ad.log.debug("p2p go ip")
-    ad.droid.wifiP2pRequestConnectionInfo()
-    ad_connect_info_event = ad.ed.pop_event(
-        p2pconsts.CONNECTION_INFO_AVAILABLE_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    ad.log.debug("p2p go ip: %s" %
-                 ad_connect_info_event['data']['groupOwnerHostAddress'])
-    return ad_connect_info_event['data']['groupOwnerHostAddress']
-
-
-def p2p_get_current_group(ad):
-    """Get current group information
-
-    Args:
-        ad: The android device
-    Return:
-        p2p group information
-    """
-    ad.log.debug("get current group")
-    ad.droid.wifiP2pRequestGroupInfo()
-    ad_group_info_event = ad.ed.pop_event(p2pconsts.GROUP_INFO_AVAILABLE_EVENT,
-                                          p2pconsts.DEFAULT_TIMEOUT)
-    ad.log.debug(
-        "p2p group: SSID:%s, password:%s, owner address: %s, interface: %s" %
-        (ad_group_info_event['data']['NetworkName'],
-         ad_group_info_event['data']['Passphrase'],
-         ad_group_info_event['data']['OwnerAddress'],
-         ad_group_info_event['data']['Interface']))
-    return ad_group_info_event['data']
-
-
-def is_ongoing_peer_ready(peerConfig, waitForPin):
-    """Check whether the peer config is ready
-
-    Args:
-        peerConfig: the ongoing config
-        waitForPin: whether this config requires a WPS PIN
-    Return:
-        true for ready; false otherwise.
-    """
-    if peerConfig is None:
-        return False
-    if not peerConfig['data'][WifiP2PEnums.WifiP2pConfig.DEVICEADDRESS_KEY]:
-        return False
-    if not waitForPin:
-        return True
-    if WifiP2PEnums.WpsInfo.WPS_PIN_KEY in peerConfig['data'][
-            WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY]:
-        return True
-    return False
-
-
-def wait_for_ongoing_peer_ready(ad, waitForPin, maxPollingCount):
-    """wait for the ongoing peer data ready
-
-    Args:
-        ad: The android device
-        waitForPin: whether this config requires a WPS PIN
-        maxPollingCount: the max polling count
-    Return:
-        the ongoing peer config
-    """
-    ad_peerConfig = None
-    ad.log.debug("%s is waiting for the ongoing peer, max polling count %s" %
-                 (ad.name, maxPollingCount))
-    while maxPollingCount > 0:
-        ad.droid.requestP2pPeerConfigure()
-        ad_peerConfig = ad.ed.pop_event(
-            p2pconsts.ONGOING_PEER_INFO_AVAILABLE_EVENT,
-            p2pconsts.DEFAULT_TIMEOUT)
-        maxPollingCount -= 1
-        if is_ongoing_peer_ready(ad_peerConfig, waitForPin):
-            break
-        ad.log.debug("%s is not ready for next step" % (ad.name))
-        time.sleep(p2pconsts.DEFAULT_POLLING_SLEEPTIME)
-    asserts.assert_true(
-        ad_peerConfig['data'][WifiP2PEnums.WifiP2pConfig.DEVICEADDRESS_KEY],
-        "DUT %s does not receive the request." % (ad.name))
-    ad.log.debug(ad_peerConfig['data'])
-    return ad_peerConfig
-
-
-#trigger p2p connect to ad2 from ad1
-def p2p_connect(ad1,
-                ad2,
-                isReconnect,
-                wpsSetup,
-                p2p_connect_type=p2pconsts.P2P_CONNECT_NEGOTIATION,
-                go_ad=None):
-    """trigger p2p connect to ad2 from ad1
-
-    Args:
-        ad1: The android device
-        ad2: The android device
-        isReconnect: boolean; true if a persistent group already exists,
-                false otherwise.
-        wpsSetup: which WPS method to use
-        p2p_connect_type: enumeration, which type this p2p connection is
-        go_ad: The group owner android device which is used for the invitation connection
-    """
-    ad1.log.info("Create p2p connection from %s to %s via wps: %s type %d" %
-                 (ad1.name, ad2.name, wpsSetup, p2p_connect_type))
-    if p2p_connect_type == p2pconsts.P2P_CONNECT_INVITATION:
-        if go_ad is None:
-            go_ad = ad1
-        find_p2p_device(ad1, ad2)
-        # GO might be another peer, so ad2 needs to find it first.
-        find_p2p_group_owner(ad2, go_ad)
-    elif p2p_connect_type == p2pconsts.P2P_CONNECT_JOIN:
-        find_p2p_group_owner(ad1, ad2)
-    else:
-        find_p2p_device(ad1, ad2)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
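-    # Build the WifiP2pConfig dict consumed by the sl4a wifiP2pConnect call.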
-    wifi_p2p_config = {
-        WifiP2PEnums.WifiP2pConfig.DEVICEADDRESS_KEY: ad2.deviceAddress,
-        WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY: {
-            WifiP2PEnums.WpsInfo.WPS_SETUP_KEY: wpsSetup
-        }
-    }
-    ad1.droid.wifiP2pConnect(wifi_p2p_config)
-    ad1.ed.pop_event(p2pconsts.CONNECT_SUCCESS_EVENT,
-                     p2pconsts.DEFAULT_TIMEOUT)
-    if not isReconnect:
-        # ad1 is the initiator, it should be ready soon.
-        ad1_peerConfig = wait_for_ongoing_peer_ready(
-            ad1, wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_DISPLAY, 6)
-        # auto-join tries 10 times to find groups, and
-        # one round takes 2 - 3 seconds.
-        ad2_peerConfig = wait_for_ongoing_peer_ready(
-            ad2, wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_KEYPAD, 31)
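-        # Exchange the WPS PIN according to the WPS method: for DISPLAY the
-        # PIN comes from ad1 and is entered on ad2, for KEYPAD the reverse,
-        # and PBC needs no PIN at all.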
-        if wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_DISPLAY:
-            asserts.assert_true(
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY in ad1_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY],
-                "Can't get display pin value")
-            ad2_peerConfig['data'][WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY] = ad1_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                        WifiP2PEnums.WpsInfo.WPS_PIN_KEY]
-            ad2.droid.setP2pPeerConfigure(ad2_peerConfig['data'])
-            ad2.ed.pop_event(p2pconsts.ONGOING_PEER_SET_SUCCESS_EVENT,
-                             p2pconsts.DEFAULT_TIMEOUT)
-            ad2.droid.wifiP2pAcceptConnection()
-        elif wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_KEYPAD:
-            asserts.assert_true(
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY in ad2_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY],
-                "Can't get keypad pin value")
-            ad1_peerConfig['data'][WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY] = ad2_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                        WifiP2PEnums.WpsInfo.WPS_PIN_KEY]
-            ad1.droid.setP2pPeerConfigure(ad1_peerConfig['data'])
-            ad1.ed.pop_event(p2pconsts.ONGOING_PEER_SET_SUCCESS_EVENT,
-                             p2pconsts.DEFAULT_TIMEOUT)
-            ad1.droid.wifiP2pAcceptConnection()
-            time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-            ad2.droid.wifiP2pConfirmConnection()
-        elif wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC:
-            ad2.droid.wifiP2pAcceptConnection()
-            if p2p_connect_type == p2pconsts.P2P_CONNECT_INVITATION:
-                time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-                go_ad.droid.wifiP2pAcceptConnection()
-
-    # wait for the connected event
-    if p2p_connect_type == p2pconsts.P2P_CONNECT_INVITATION:
-        go_ad.ed.pop_event(p2pconsts.CONNECTED_EVENT,
-                           p2pconsts.DEFAULT_TIMEOUT)
-    else:
-        ad1.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    ad2.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-
-
-def p2p_connect_with_config(ad1, ad2, network_name, passphrase, band):
-    """trigger p2p connect to ad2 from ad1 with config
-
-    Args:
-        ad1: The android device
-        ad2: The android device
-        network_name: the network name of the desired group.
-        passphrase: the passphrase of the desired group.
-        band: the operating band of the desired group.
-    """
-    ad1.log.info("Create p2p connection from %s to %s" % (ad1.name, ad2.name))
-    find_p2p_device(ad1, ad2)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-    wifi_p2p_config = {
-        WifiP2PEnums.WifiP2pConfig.NETWORK_NAME: network_name,
-        WifiP2PEnums.WifiP2pConfig.PASSPHRASE: passphrase,
-        WifiP2PEnums.WifiP2pConfig.GROUP_BAND: band,
-        WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY: {
-            WifiP2PEnums.WpsInfo.WPS_SETUP_KEY:
-            WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC
-        }
-    }
-    ad1.droid.wifiP2pConnect(wifi_p2p_config)
-    ad1.ed.pop_event(p2pconsts.CONNECT_SUCCESS_EVENT,
-                     p2pconsts.DEFAULT_TIMEOUT)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-
-    # wait for the connected event
-    ad1.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    ad2.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-
-
-def find_p2p_device(ad1, ad2):
-    """Check an Android device ad1 can discover an Android device ad2
-
-    Args:
-        ad1: The android device
-        ad2: The android device
-    """
-    ad1.droid.wifiP2pDiscoverPeers()
-    ad2.droid.wifiP2pDiscoverPeers()
-    p2p_find_result = False
-    ad1.ed.clear_events(p2pconsts.PEER_AVAILABLE_EVENT)
-    while not p2p_find_result:
-        ad1_event = ad1.ed.pop_event(p2pconsts.PEER_AVAILABLE_EVENT,
-                                     p2pconsts.P2P_FIND_TIMEOUT)
-        ad1.log.debug(ad1_event['data'])
-        p2p_find_result = is_discovered(ad1_event, ad2)
-    asserts.assert_true(p2p_find_result,
-                        "DUT didn't discovered peer:%s device" % (ad2.name))
-
-
-def find_p2p_group_owner(ad1, ad2):
-    """Check an Android device ad1 can discover an Android device ad2 which
-       is a group owner
-
-    Args:
-        ad1: The android device
-        ad2: The android device which is a group owner
-    """
-    p2p_find_result = False
-    ad1.ed.clear_events(p2pconsts.PEER_AVAILABLE_EVENT)
-    while not p2p_find_result:
-        ad2.droid.wifiP2pStopPeerDiscovery()
-        ad1.droid.wifiP2pStopPeerDiscovery()
-        ad2.droid.wifiP2pDiscoverPeers()
-        ad1.droid.wifiP2pDiscoverPeers()
-        ad1_event = ad1.ed.pop_event(p2pconsts.PEER_AVAILABLE_EVENT,
-                                     p2pconsts.P2P_FIND_TIMEOUT)
-        ad1.log.debug(ad1_event['data'])
-        for device in ad1_event['data']['Peers']:
-            if (device['Name'] == ad2.name and int(device['GroupCapability'])
-                    & p2pconsts.P2P_GROUP_CAPAB_GROUP_OWNER):
-                ad2.deviceAddress = device['Address']
-                p2p_find_result = True
-    asserts.assert_true(
-        p2p_find_result,
-        "DUT didn't discovered group owner peer:%s device" % (ad2.name))
-
-
-def createP2pLocalService(ad, serviceCategory):
-    """Based on serviceCategory to create p2p local service
-            on an Android device ad
-
-    Args:
-        ad: The android device
-        serviceCategory: p2p local service type, UPNP / IPP / AFP,
-    """
-    testData = genTestData(serviceCategory)
-    if serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_UPNP:
-        ad.droid.wifiP2pCreateUpnpServiceInfo(testData[0], testData[1],
-                                              testData[2])
-    elif (serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_IPP
-          or serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_AFP):
-        ad.droid.wifiP2pCreateBonjourServiceInfo(testData[0], testData[1],
-                                                 testData[2])
-    ad.droid.wifiP2pAddLocalService()
-
-
-def requestServiceAndCheckResult(ad_serviceProvider, ad_serviceReceiver,
-                                 serviceType, queryString1, queryString2):
-    """Based on serviceType and query info, check service request result
-            same as expect or not on an Android device ad_serviceReceiver.
-            And remove p2p service request after result check.
-
-    Args:
-        ad_serviceProvider: The android device which provide p2p local service
-        ad_serviceReceiver: The android device which query p2p local service
-        serviceType: P2p local service type, Upnp or Bonjour
-        queryString1: Query String, NonNull
-        queryString2: Query String, used for Bonjour, Nullable
-    """
-    expectData = genExpectTestData(serviceType, queryString1, queryString2)
-    find_p2p_device(ad_serviceReceiver, ad_serviceProvider)
-    ad_serviceReceiver.droid.wifiP2pStopPeerDiscovery()
-    ad_serviceReceiver.droid.wifiP2pClearServiceRequests()
-    time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-
-    ad_serviceReceiver.droid.wifiP2pDiscoverServices()
-    serviceData = {}
-    service_id = 0
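-    # serviceData is used as a set: responses are keyed by their string form
-    # so duplicates collapse into a single entry.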
-    if (serviceType ==
-            WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_BONJOUR):
-        ad_serviceReceiver.log.info(
-            "Request bonjour service in \
-                %s with Query String %s and %s " %
-            (ad_serviceReceiver.name, queryString1, queryString2))
-        ad_serviceReceiver.log.info("expectData %s" % expectData)
-        if queryString1 != None:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddDnssdServiceRequest(
-                queryString1, queryString2)
-        else:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddServiceRequest(
-                serviceType)
-            ad_serviceReceiver.log.info("request bonjour service id %s" %
-                                        service_id)
-        ad_serviceReceiver.droid.wifiP2pSetDnsSdResponseListeners()
-        ad_serviceReceiver.droid.wifiP2pDiscoverServices()
-        ad_serviceReceiver.log.info("Check Service Listener")
-        time.sleep(p2pconsts.DEFAULT_SERVICE_WAITING_TIME)
-        try:
-            dnssd_events = ad_serviceReceiver.ed.pop_all(p2pconsts.DNSSD_EVENT)
-            dnssd_txrecord_events = ad_serviceReceiver.ed.pop_all(
-                p2pconsts.DNSSD_TXRECORD_EVENT)
-            dns_service = WifiP2PEnums.WifiP2pDnsSdServiceResponse()
-            for dnssd_event in dnssd_events:
-                if dnssd_event['data'][
-                        'SourceDeviceAddress'] == ad_serviceProvider.deviceAddress:
-                    dns_service.InstanceName = dnssd_event['data'][
-                        p2pconsts.DNSSD_EVENT_INSTANCENAME_KEY]
-                    dns_service.RegistrationType = dnssd_event['data'][
-                        p2pconsts.DNSSD_EVENT_REGISTRATIONTYPE_KEY]
-                    dns_service.FullDomainName = ""
-                    dns_service.TxtRecordMap = ""
-                    serviceData[dns_service.toString()] = 1
-            for dnssd_txrecord_event in dnssd_txrecord_events:
-                if dnssd_txrecord_event['data'][
-                        'SourceDeviceAddress'] == ad_serviceProvider.deviceAddress:
-                    dns_service.InstanceName = ""
-                    dns_service.RegistrationType = ""
-                    dns_service.FullDomainName = dnssd_txrecord_event['data'][
-                        p2pconsts.DNSSD_TXRECORD_EVENT_FULLDOMAINNAME_KEY]
-                    dns_service.TxtRecordMap = dnssd_txrecord_event['data'][
-                        p2pconsts.DNSSD_TXRECORD_EVENT_TXRECORDMAP_KEY]
-                    serviceData[dns_service.toString()] = 1
-            ad_serviceReceiver.log.info("serviceData %s" % serviceData)
-            if len(serviceData) == 0:
-                ad_serviceReceiver.droid.wifiP2pRemoveServiceRequest(
-                    service_id)
-                return -1
-        except queue.Empty as error:
-            ad_serviceReceiver.log.info("dnssd event is empty", )
-    elif (serviceType ==
-          WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_UPNP):
-        ad_serviceReceiver.log.info(
-            "Request upnp service in %s with Query String %s " %
-            (ad_serviceReceiver.name, queryString1))
-        ad_serviceReceiver.log.info("expectData %s" % expectData)
-        if queryString1 != None:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddUpnpServiceRequest(
-                queryString1)
-        else:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddServiceRequest(
-                WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_UPNP)
-        ad_serviceReceiver.droid.wifiP2pSetUpnpResponseListeners()
-        ad_serviceReceiver.droid.wifiP2pDiscoverServices()
-        ad_serviceReceiver.log.info("Check Service Listener")
-        time.sleep(p2pconsts.DEFAULT_SERVICE_WAITING_TIME)
-        try:
-            upnp_events = ad_serviceReceiver.ed.pop_all(p2pconsts.UPNP_EVENT)
-            for upnp_event in upnp_events:
-                if upnp_event['data']['Device'][
-                        'Address'] == ad_serviceProvider.deviceAddress:
-                    for service in upnp_event['data'][
-                            p2pconsts.UPNP_EVENT_SERVICELIST_KEY]:
-                        serviceData[service] = 1
-            ad_serviceReceiver.log.info("serviceData %s" % serviceData)
-            if len(serviceData) == 0:
-                ad_serviceReceiver.droid.wifiP2pRemoveServiceRequest(
-                    service_id)
-                return -1
-        except queue.Empty as error:
-            ad_serviceReceiver.log.info("p2p upnp event is empty", )
-
-    ad_serviceReceiver.log.info("Check ServiceList")
-    asserts.assert_true(checkServiceQueryResult(serviceData, expectData),
-                        "ServiceList not same as Expect")
-    # After service checked, remove the service_id
-    ad_serviceReceiver.droid.wifiP2pRemoveServiceRequest(service_id)
-    return 0
-
-
-def requestServiceAndCheckResultWithRetry(ad_serviceProvider,
-                                          ad_serviceReceiver,
-                                          serviceType,
-                                          queryString1,
-                                          queryString2,
-                                          retryCount=3):
-    """ allow failures for requestServiceAndCheckResult. Service
-        discovery might fail unexpectedly because the request packet might not be
-        recevied by the service responder due to p2p state switch.
-
-    Args:
-        ad_serviceProvider: The android device which provide p2p local service
-        ad_serviceReceiver: The android device which query p2p local service
-        serviceType: P2p local service type, Upnp or Bonjour
-        queryString1: Query String, NonNull
-        queryString2: Query String, used for Bonjour, Nullable
-        retryCount: maximum retry count, default is 3
-    """
-    ret = 0
-    while retryCount > 0:
-        ret = requestServiceAndCheckResult(ad_serviceProvider,
-                                           ad_serviceReceiver, serviceType,
-                                           queryString1, queryString2)
-        if (ret == 0):
-            break
-        retryCount -= 1
-
-    asserts.assert_equal(0, ret, "cannot find any services with retries.")
-
-
-def checkServiceQueryResult(serviceList, expectServiceList):
-    """Check serviceList same as expectServiceList or not
-
-    Args:
-        serviceList: ServiceList which get from query result
-        expectServiceList: ServiceList which hardcode in genExpectTestData
-    Return:
-        True: serviceList is the same as expectServiceList
-        False: a discrepancy exists between serviceList and expectServiceList
-    """
-    tempServiceList = serviceList.copy()
-    tempExpectServiceList = expectServiceList.copy()
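-    # Remove every service present in both lists; any leftover entry on either
-    # side is a mismatch.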
-    for service in serviceList.keys():
-        if service in expectServiceList:
-            del tempServiceList[service]
-            del tempExpectServiceList[service]
-    return len(tempExpectServiceList) == 0 and len(tempServiceList) == 0
-
-
-def genTestData(serviceCategory):
-    """Based on serviceCategory to generator Test Data
-
-    Args:
-        serviceCategory: P2p local service type, Upnp or Bonjour
-    Return:
-        TestData
-    """
-    testData = []
-    if serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_UPNP:
-        testData.append(p2pconsts.UpnpTestData.uuid)
-        testData.append(p2pconsts.UpnpTestData.serviceType)
-        testData.append([
-            p2pconsts.UpnpTestData.AVTransport,
-            p2pconsts.UpnpTestData.ConnectionManager
-        ])
-    elif serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_IPP:
-        testData.append(p2pconsts.IppTestData.ippInstanceName)
-        testData.append(p2pconsts.IppTestData.ippRegistrationType)
-        testData.append(p2pconsts.IppTestData.ipp_txtRecord)
-    elif serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_AFP:
-        testData.append(p2pconsts.AfpTestData.afpInstanceName)
-        testData.append(p2pconsts.AfpTestData.afpRegistrationType)
-        testData.append(p2pconsts.AfpTestData.afp_txtRecord)
-
-    return testData
-
-
-def genExpectTestData(serviceType, queryString1, queryString2):
-    """Based on serviceCategory to generator expect serviceList
-
-    Args:
-        serviceType: P2p local service type, Upnp or Bonjour
-        queryString1: Query String, NonNull
-        queryString2: Query String, used for Bonjour, Nullable
-    Return:
-        expectServiceList
-    """
-    expectServiceList = {}
-    if (serviceType ==
-            WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_BONJOUR):
-        ipp_service = WifiP2PEnums.WifiP2pDnsSdServiceResponse()
-        afp_service = WifiP2PEnums.WifiP2pDnsSdServiceResponse()
-        if queryString1 == p2pconsts.IppTestData.ippRegistrationType:
-            if queryString2 == p2pconsts.IppTestData.ippInstanceName:
-                ipp_service.InstanceName = ""
-                ipp_service.RegistrationType = ""
-                ipp_service.FullDomainName = p2pconsts.IppTestData.ippDomainName
-                ipp_service.TxtRecordMap = p2pconsts.IppTestData.ipp_txtRecord
-                expectServiceList[ipp_service.toString()] = 1
-                return expectServiceList
-            ipp_service.InstanceName = p2pconsts.IppTestData.ippInstanceName
-            ipp_service.RegistrationType = (
-                p2pconsts.IppTestData.ippRegistrationType + ".local.")
-            ipp_service.FullDomainName = ""
-            ipp_service.TxtRecordMap = ""
-            expectServiceList[ipp_service.toString()] = 1
-            return expectServiceList
-        elif queryString1 == p2pconsts.AfpTestData.afpRegistrationType:
-            if queryString2 == p2pconsts.AfpTestData.afpInstanceName:
-                afp_service.InstanceName = ""
-                afp_service.RegistrationType = ""
-                afp_service.FullDomainName = p2pconsts.AfpTestData.afpDomainName
-                afp_service.TxtRecordMap = p2pconsts.AfpTestData.afp_txtRecord
-                expectServiceList[afp_service.toString()] = 1
-                return expectServiceList
-        ipp_service.InstanceName = p2pconsts.IppTestData.ippInstanceName
-        ipp_service.RegistrationType = (
-            p2pconsts.IppTestData.ippRegistrationType + ".local.")
-        ipp_service.FullDomainName = ""
-        ipp_service.TxtRecordMap = ""
-        expectServiceList[ipp_service.toString()] = 1
-
-        ipp_service.InstanceName = ""
-        ipp_service.RegistrationType = ""
-        ipp_service.FullDomainName = p2pconsts.IppTestData.ippDomainName
-        ipp_service.TxtRecordMap = p2pconsts.IppTestData.ipp_txtRecord
-        expectServiceList[ipp_service.toString()] = 1
-
-        afp_service.InstanceName = p2pconsts.AfpTestData.afpInstanceName
-        afp_service.RegistrationType = (
-            p2pconsts.AfpTestData.afpRegistrationType + ".local.")
-        afp_service.FullDomainName = ""
-        afp_service.TxtRecordMap = ""
-        expectServiceList[afp_service.toString()] = 1
-
-        afp_service.InstanceName = ""
-        afp_service.RegistrationType = ""
-        afp_service.FullDomainName = p2pconsts.AfpTestData.afpDomainName
-        afp_service.TxtRecordMap = p2pconsts.AfpTestData.afp_txtRecord
-        expectServiceList[afp_service.toString()] = 1
-
-        return expectServiceList
-    elif serviceType == WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_UPNP:
-        upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + "::" + (
-            p2pconsts.UpnpTestData.rootdevice)
-        expectServiceList[upnp_service] = 1
-        if queryString1 != "upnp:rootdevice":
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + (
-                "::" + p2pconsts.UpnpTestData.AVTransport)
-            expectServiceList[upnp_service] = 1
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + (
-                "::" + p2pconsts.UpnpTestData.ConnectionManager)
-            expectServiceList[upnp_service] = 1
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + (
-                "::" + p2pconsts.UpnpTestData.serviceType)
-            expectServiceList[upnp_service] = 1
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid
-            expectServiceList[upnp_service] = 1
-
-    return expectServiceList
-
-
-def p2p_create_group(ad):
-    """Create a group as Group Owner
-
-    Args:
-        ad: The android device
-    """
-    ad.droid.wifiP2pCreateGroup()
-    ad.ed.pop_event(p2pconsts.CREATE_GROUP_SUCCESS_EVENT,
-                    p2pconsts.DEFAULT_TIMEOUT)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-
-
-def p2p_create_group_with_config(ad, network_name, passphrase, band):
-    """Create a group as Group Owner
-
-    Args:
-        ad: The android device
-        network_name: the network name of the desired group.
-        passphrase: the passphrase of the desired group.
-        band: the operating band of the desired group.
-    """
-    wifi_p2p_config = {
-        WifiP2PEnums.WifiP2pConfig.NETWORK_NAME: network_name,
-        WifiP2PEnums.WifiP2pConfig.PASSPHRASE: passphrase,
-        WifiP2PEnums.WifiP2pConfig.GROUP_BAND: band,
-        WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY: {
-            WifiP2PEnums.WpsInfo.WPS_SETUP_KEY:
-            WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC
-        }
-    }
-    ad.droid.wifiP2pCreateGroupWithConfig(wifi_p2p_config)
-    ad.ed.pop_event(p2pconsts.CREATE_GROUP_SUCCESS_EVENT,
-                    p2pconsts.DEFAULT_TIMEOUT)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-
-
-def wifi_p2p_set_channels_for_current_group(ad, listening_chan,
-                                            operating_chan):
-    """Sets the listening channel and operating channel of the current group
-       created with initialize.
-
-    Args:
-        ad: The android device
-        listening_chan: Integer, the listening channel
-        operating_chan: Integer, the operating channel
-    """
-    ad.droid.wifiP2pSetChannelsForCurrentGroup(listening_chan, operating_chan)
-    ad.ed.pop_event(p2pconsts.SET_CHANNEL_SUCCESS_EVENT,
-                    p2pconsts.DEFAULT_TIMEOUT)
-
-
-class WifiP2PEnums():
-    class WifiP2pConfig():
-        DEVICEADDRESS_KEY = "deviceAddress"
-        WPSINFO_KEY = "wpsInfo"
-        GO_INTENT_KEY = "groupOwnerIntent"
-        NETID_KEY = "netId"
-        NETWORK_NAME = "networkName"
-        PASSPHRASE = "passphrase"
-        GROUP_BAND = "groupOwnerBand"
-
-    class WpsInfo():
-        WPS_SETUP_KEY = "setup"
-        BSSID_KEY = "BSSID"
-        WPS_PIN_KEY = "pin"
-        #TODO: remove it from wifi_test_utils.py
-        WIFI_WPS_INFO_PBC = 0
-        WIFI_WPS_INFO_DISPLAY = 1
-        WIFI_WPS_INFO_KEYPAD = 2
-        WIFI_WPS_INFO_LABEL = 3
-        WIFI_WPS_INFO_INVALID = 4
-
-    class WifiP2pServiceInfo():
-        #TODO: remove it from wifi_test_utils.py
-        # Macros for wifi p2p.
-        WIFI_P2P_SERVICE_TYPE_ALL = 0
-        WIFI_P2P_SERVICE_TYPE_BONJOUR = 1
-        WIFI_P2P_SERVICE_TYPE_UPNP = 2
-        WIFI_P2P_SERVICE_TYPE_VENDOR_SPECIFIC = 255
-
-    class WifiP2pDnsSdServiceResponse():
-        def __init__(self):
-            pass
-
-        InstanceName = ""
-        RegistrationType = ""
-        FullDomainName = ""
-        TxtRecordMap = {}
-
-        def toString(self):
-            return self.InstanceName + self.RegistrationType + (
-                self.FullDomainName + str(self.TxtRecordMap))
diff --git a/src/antlion/test_utils/wifi/pdu_controller_utils.py b/src/antlion/test_utils/wifi/pdu_controller_utils.py
deleted file mode 100644
index e243d33..0000000
--- a/src/antlion/test_utils/wifi/pdu_controller_utils.py
+++ /dev/null
@@ -1,203 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import time
-from antlion import logger
-from antlion import utils
-
-
-def create(configs):
-    """Factory method for PDU.
-
-    Args:
-        configs: list of dicts with pdu settings. Each dict must contain a
-        'device' key (string denoting the type of pdu).
-    """
-    objs = []
-    for config in configs:
-        try:
-            pdu_class = globals()[config['device']]
-        except KeyError:
-            raise KeyError('Invalid pdu configuration.')
-        objs.append(pdu_class(config))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class Pdu(object):
-    """Base class implementation for PDU.
-
-    Base class provides functions whose implementation is shared by all
-    PDUs.
-    """
-
-    def on_all(self):
-        """Turn on all outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def off_all(self):
-        """Turn off all outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def _set_status(self, action, status):
-        """Set outlets to on or off."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def get_status(self):
-        """Get outlets status."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def turn_on_outlets(self, outlets):
-        """Turn on specific outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def turn_off_outlets(self, outlets):
-        """Turn off specific outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-
-class PanioPs1158(Pdu):
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('pdu_ps1158[{}]'.format(
-            self.device_id))
-
-    def on_all(self):
-        """Turn on all outlets"""
-        self._set_status("on", '11111111')
-
-    def off_all(self):
-        """Turn off all outlets"""
-        self._set_status("off", "11111111")
-
-    def _set_status(self, action, status):
-        """Set outlets to on or off.
-
-        Args:
-            action: "on" or "off"
-            status: 8 bits of 0 or 1. e.g., "11111111"
-        """
-        cmd = "curl http://{}:{}@{}/{}s.cgi?led={}".format(self.config['username'],
-                                                           self.config['password'],
-                                                           self.config['host'],
-                                                           action,
-                                                           status)
-        self.log.info("PDU cmd: {}".format(cmd))
-        utils.start_standing_subprocess(cmd)
-        time.sleep(10)
-
-    def get_status(self):
-        """Get outlets status
-
-        Returns:
-            A tuple of (outlets_list, outlets_str)
-                outlets_list:
-                    A list indicating the status of the outlets.
-                    e.g., outlet 1 is ON, returns:
-                        ['1', '0', '0', '0', '0', '0', '0', '0',]
-                    e.g., outlets 1 & 8 are ON, returns:
-                        ['1', '0', '0', '0', '0', '0', '0', '1']
-
-                outlets_str:
-                    A string indicating which outlets are ON.
-                    e.g., outlet 1 is ON:
-                        returns: '1'
-                    e.g., outlets 1 & 3 & 8 are ON:
-                        returns: '138'
-        """
-        outlets_str = ""
-        cmd = "curl http://{}:{}@{}/status.xml".format(self.config['username'],
-                                                       self.config['password'],
-                                                       self.config['host'])
-        proc = utils.start_standing_subprocess(cmd)
-        time.sleep(1)
-        try:
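-            # Entries 10-17 of the comma-separated status.xml payload are
-            # taken as the eight outlet flags.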
-            outlets_list = proc.communicate()[0].decode().split(",")[10:18]
-
-            """Translate a list of strings to a sequence of strings.
-            e.g.
-                ['1', '0', '0', '0', '0', '0', '0', '0',] turns into '1'
-                ['1', '1', '1', '1', '1', '1', '1', '1'] turns into '12345678'
-            """
-            for i in range(len(outlets_list)):
-                if outlets_list[i] == '1':
-                    outlets_str = outlets_str + str(i + 1)
-        except Exception:
-            raise KeyError("Failed to get status from PDU.")
-
-        return outlets_list, outlets_str
-
-    def turn_on_outlets(self, outlets):
-        """Turn specific outlets on
-
-        Args:
-            outlets: A string of outlet numbers.
-            e.g., '1' means outlets status will be: '10000000'
-            e.g., '378' means outlets status will be: '00100011'
-        """
-        self.off_all()
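-        # Build an 8-character bitmask with '1' for each requested outlet
-        # (outlet 1 maps to the left-most character).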
-        expect_outlets = ["1" if str(i) in outlets else "0" for i in range(1, 9)]
-        self._set_status("on", "".join(expect_outlets))
-
-        # Check if outlets are on as expected.
-        actual_outlets, _ = self.get_status()
-        self.log.info("Expect outlets : {}".format(expect_outlets))
-        self.log.info("Actual outlets : {}".format(actual_outlets))
-        if expect_outlets == actual_outlets:
-            self.log.info("Outlets are ON as expected")
-        else:
-            self.log.error("Outlets are not correctly turn on")
-
-    def turn_off_outlets(self, outlets):
-        """Turn specific outlets off
-
-        Args:
-            outlets: A string of outlet numbers.
-            e.g., '1' means outlets status will be: '01111111'
-            e.g., '378' means outlets status will be: '11011100'
-
-        """
-        self.on_all()
-        expect_outlets = ["1" if str(i) in outlets else "0" for i in range(1, 9)]
-        self._set_status("off", "".join(expect_outlets))
-
-        # Check if outlets are off as expected.
-        actual_outlets, _ = self.get_status()
-        temp_list = []
-
-        """When Turn off outlets, Panio ps1158 use "1" to turn off a outlet
-        (e.g., curl http://{}@{}/offs.cgi?led=00000001 to turn off outlet 8,
-        but actual outlets status will be '11111110', so need to
-        Turn "1" into "0" and vice versa to match the actual outlets status.
-        """
-        for outlet in expect_outlets:
-            if outlet == '1':
-                outlet = '0'
-                temp_list.append(outlet)
-            elif outlet == '0':
-                outlet = '1'
-                temp_list.append(outlet)
-        expect_outlets = temp_list
-        self.log.info("Expect outlets : {}".format(expect_outlets))
-        self.log.info("Actual outlets : {}".format(actual_outlets))
-        if expect_outlets == actual_outlets:
-            self.log.info("Outlets are OFF as expected")
-        else:
-            self.log.error("Outlets are not correctly turn off")
diff --git a/src/antlion/test_utils/wifi/rpm_controller_utils.py b/src/antlion/test_utils/wifi/rpm_controller_utils.py
deleted file mode 100644
index 8f1fd0c..0000000
--- a/src/antlion/test_utils/wifi/rpm_controller_utils.py
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from antlion.controllers.attenuator_lib._tnhelper import _ascii_string
-
-import logging
-import telnetlib
-
-ID = '.A'
-LOGIN_PWD = 'admn'
-ON = 'On'
-OFF = 'Off'
-PASSWORD = 'Password: '
-PORT = 23
-RPM_PROMPT = 'Switched CDU: '
-SEPARATOR = '\n'
-TIMEOUT = 3
-USERNAME = 'Username: '
-
-
-class RpmControllerError(Exception):
-    """Error related to RPM switch."""
-
-class RpmController(object):
-    """Class representing telnet to RPM switch.
-
-    Each object represents a telnet connection to the RPM switch's IP.
-
-    Attributes:
-        tn: represents a connection to RPM switch.
-        host: IP address of the RPM controller.
-    """
-    def __init__(self, host):
-        """Initializes the RPM controller object.
-
-        Establishes a telnet connection and login to the switch.
-        """
-        self.host = host
-        logging.info('RPM IP: %s' % self.host)
-
-        self.tn = telnetlib.Telnet(self.host)
-        self.tn.open(self.host, PORT, TIMEOUT)
-        self.run(USERNAME, LOGIN_PWD)
-        result = self.run(PASSWORD, LOGIN_PWD)
-        if RPM_PROMPT not in result:
-            raise RpmControllerError('Failed to login to rpm controller %s'
-                                     % self.host)
-
-    def run(self, prompt, cmd_str):
-        """Method to run commands on the RPM.
-
-        This method simply runs a command and returns output in decoded format.
-        The calling methods should take care of parsing the expected result
-        from this output.
-
-        Args:
-            prompt: Expected prompt before running a command.
-            cmd_str: Command to run on RPM.
-
-        Returns:
-            Decoded text returned by the command.
-        """
-        cmd_str = '%s%s' % (cmd_str, SEPARATOR)
-        res = self.tn.read_until(_ascii_string(prompt), TIMEOUT)
-
-        self.tn.write(_ascii_string(cmd_str))
-        idx, val, txt = self.tn.expect(
-            [_ascii_string('\S+%s' % SEPARATOR)], TIMEOUT)
-
-        return txt.decode()
-
-    def set_rpm_port_state(self, rpm_port, state):
-        """Method to turn on/off rpm port.
-
-        Args:
-            rpm_port: port number of the switch to turn on or off.
-            state: 'on' or 'off'
-
-        Returns:
-            True: if the state is set to the expected value
-        """
-        port = '%s%s' % (ID, rpm_port)
-        logging.info('Turning %s port: %s' % (state, port))
-        self.run(RPM_PROMPT, '%s %s' % (state.lower(), port))
-        result = self.run(RPM_PROMPT, 'status %s' % port)
-        if port not in result:
-            raise RpmControllerError('Port %s doesn\'t exist' % port)
-        return state in result
-
-    def turn_on(self, rpm_port):
-        """Method to turn on a port on the RPM switch.
-
-        Args:
-            rpm_port: port number of the switch to turn on.
-
-        Returns:
-            True if the port is turned on.
-            False if not turned on.
-        """
-        return self.set_rpm_port_state(rpm_port, ON)
-
-    def turn_off(self, rpm_port):
-        """Method to turn off a port on the RPM switch.
-
-        Args:
-            rpm_port: port number of the switch to turn off.
-
-        Returns:
-            True if the port is turned off.
-            False if not turned off.
-        """
-        return self.set_rpm_port_state(rpm_port, OFF)
-
-    def __del__(self):
-        """Close the telnet connection. """
-        self.tn.close()
-
-
-def create_telnet_session(ip):
-    """Returns telnet connection object to RPM's IP."""
-    return RpmController(ip)
-
-def turn_on_ap(pcap, ssid, rpm_port, rpm_ip=None, rpm=None):
-    """Turn on the AP.
-
-    This method turns on the RPM port the AP is connected to and verifies
-    that the SSID of the AP is found in the scan results through the
-    packet capturer.
-
-    Either IP addr of the RPM switch or the existing telnet connection
-    to the RPM is required. Multiple APs might be connected to the same RPM
-    switch. Instead of connecting/terminating telnet for each AP, the test
-    can maintain a single telnet connection for all the APs.
-
-    Args:
-        pcap: packet capture object.
-        ssid: SSID of the wifi network.
-        rpm_port: Port number on the RPM switch the AP is connected to.
-        rpm_ip: IP address of the RPM switch.
-        rpm: telnet connection object to the RPM switch.
-    """
-    if not rpm and not rpm_ip:
-        logging.error("Failed to turn on AP. Need telnet object or RPM IP")
-        return False
-    elif not rpm:
-        rpm = create_telnet_session(rpm_ip)
-
-    return rpm.turn_on(rpm_port) and pcap.start_scan_and_find_network(ssid)
-
-def turn_off_ap(rpm_port, rpm_ip=None, rpm=None):
-    """ Turn off AP.
-
-    This method turns off the RPM port the AP is connected to.
-
-    Either IP addr of the RPM switch or the existing telnet connection
-    to the RPM is required.
-
-    Args:
-        rpm_port: Port number on the RPM switch the AP is connected to.
-        rpm_ip: IP address of the RPM switch.
-        rpm: telnet connection object to the RPM switch.
-    """
-    if not rpm and not rpm_ip:
-        logging.error("Failed to turn off AP. Need telnet object or RPM IP")
-        return False
-    elif not rpm:
-        rpm = create_telnet_session(rpm_ip)
-
-    return rpm.turn_off(rpm_port)
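Before this deletion, the module could be driven roughly as follows. This is a hypothetical usage sketch: the switch IP (a TEST-NET-1 placeholder) and the port number are assumptions, and a single telnet session is reused across calls, as the docstrings above recommend.

```python
from antlion.test_utils.wifi import rpm_controller_utils as rpm_utils

# Placeholder switch address and outlet; substitute real testbed values.
rpm = rpm_utils.create_telnet_session("192.0.2.10")
rpm.turn_off(4)  # power off the AP wired to port .A4
rpm.turn_on(4)   # power it back on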
diff --git a/src/antlion/test_utils/wifi/rtt/RttBaseTest.py b/src/antlion/test_utils/wifi/rtt/RttBaseTest.py
deleted file mode 100644
index bbd3efd..0000000
--- a/src/antlion/test_utils/wifi/rtt/RttBaseTest.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from antlion import asserts
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.rtt import rtt_const as rconsts
-from antlion.test_utils.wifi.rtt import rtt_test_utils as rutils
-
-
-class RttBaseTest(BaseTestClass):
-
-    def setup_class(self):
-        opt_param = ["pixel_models", "cnss_diag_file", "ranging_role_concurrency_flexible_models"]
-        self.unpack_userparams(opt_param_names=opt_param)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-
-    def setup_test(self):
-        required_params = ("lci_reference", "lcr_reference",
-                           "rtt_reference_distance_mm",
-                           "stress_test_min_iteration_count",
-                           "stress_test_target_run_time_sec")
-        self.unpack_userparams(required_params)
-
-        # can be moved to JSON config file
-        self.rtt_reference_distance_margin_mm = 2000
-        self.rtt_max_failure_rate_two_sided_rtt_percentage = 20
-        self.rtt_max_failure_rate_one_sided_rtt_percentage = 50
-        self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage = 10
-        self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage = 50
-        self.rtt_min_expected_rssi_dbm = -100
-
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-
-        for ad in self.android_devices:
-            utils.set_location_service(ad, True)
-            ad.droid.wifiEnableVerboseLogging(1)
-            asserts.skip_if(
-                not ad.droid.doesDeviceSupportWifiRttFeature(),
-                "Device under test does not support Wi-Fi RTT - skipping test")
-            wutils.wifi_toggle_state(ad, True)
-            rtt_avail = ad.droid.wifiIsRttAvailable()
-            if not rtt_avail:
-                self.log.info('RTT not available. Waiting ...')
-                rutils.wait_for_event(ad, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
-            ad.ed.clear_all_events()
-            rutils.config_privilege_override(ad, False)
-            wutils.set_wifi_country_code(ad, wutils.WifiEnums.CountryCode.US)
-            ad.rtt_capabilities = rutils.get_rtt_capabilities(ad)
-
-    def teardown_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(
-                    proc[0], proc[1], self.test_name, pull_dump=False)
-        self.tcpdump_proc = []
-        for ad in self.android_devices:
-            if not ad.droid.doesDeviceSupportWifiRttFeature():
-                return
-
-            # clean-up queue from the System Service UID
-            ad.droid.wifiRttCancelRanging([1000])
-
-    def on_fail(self, test_name, begin_time):
-        for ad in self.android_devices:
-            ad.take_bug_report(test_name, begin_time)
-            ad.cat_adb_log(test_name, begin_time)
-            wutils.get_ssrdumps(ad)
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for ad in self.android_devices:
-                wutils.get_cnss_diag_log(ad)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-        self.tcpdump_proc = []
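For reference, `setup_test()` above unpacked the following required user params from the test config; this sketch shows them as a Python dict with placeholder values (none of the numbers come from a real testbed).

```python
rtt_user_params = {
    "lci_reference": [],                # expected LCI bytes ([] = none configured)
    "lcr_reference": [],                # expected LCR bytes
    "rtt_reference_distance_mm": 4500,  # placeholder ground-truth DUT-to-AP distance
    "stress_test_min_iteration_count": 10,
    "stress_test_target_run_time_sec": 30,
}
```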
diff --git a/src/antlion/test_utils/wifi/rtt/__init__.py b/src/antlion/test_utils/wifi/rtt/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/wifi/rtt/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/wifi/rtt/rtt_const.py b/src/antlion/test_utils/wifi/rtt/rtt_const.py
deleted file mode 100644
index 379b9b1..0000000
--- a/src/antlion/test_utils/wifi/rtt/rtt_const.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# Broadcast events
-######################################################
-BROADCAST_WIFI_RTT_AVAILABLE = "WifiRttAvailable"
-BROADCAST_WIFI_RTT_NOT_AVAILABLE = "WifiRttNotAvailable"
-
-######################################################
-# RangingResultCallback events
-######################################################
-EVENT_CB_RANGING_ON_FAIL = "WifiRttRangingFailure"
-EVENT_CB_RANGING_ON_RESULT = "WifiRttRangingResults"
-
-EVENT_CB_RANGING_KEY_RESULTS = "Results"
-
-EVENT_CB_RANGING_KEY_STATUS = "status"
-EVENT_CB_RANGING_KEY_DISTANCE_MM = "distanceMm"
-EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM = "distanceStdDevMm"
-EVENT_CB_RANGING_KEY_RSSI = "rssi"
-EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS = "numAttemptedMeasurements"
-EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS = "numSuccessfulMeasurements"
-EVENT_CB_RANGING_KEY_LCI = "lci"
-EVENT_CB_RANGING_KEY_LCR = "lcr"
-EVENT_CB_RANGING_KEY_TIMESTAMP = "timestamp"
-EVENT_CB_RANGING_KEY_MAC = "mac"
-EVENT_CB_RANGING_KEY_PEER_ID = "peerId"
-EVENT_CB_RANGING_KEY_MAC_AS_STRING = "macAsString"
-
-EVENT_CB_RANGING_STATUS_SUCCESS = 0
-EVENT_CB_RANGING_STATUS_FAIL = 1
-EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC = 2
-
-######################################################
-# status codes
-######################################################
-
-RANGING_FAIL_CODE_GENERIC = 1
-RANGING_FAIL_CODE_RTT_NOT_AVAILABLE = 2
-
-######################################################
-# ScanResults keys
-######################################################
-
-SCAN_RESULT_KEY_RTT_RESPONDER = "is80211McRTTResponder"
-
-######################################################
-# Capabilities keys
-######################################################
-
-CAP_RTT_ONE_SIDED_SUPPORTED = "rttOneSidedSupported"
-CAP_FTM_SUPPORTED = "rttFtmSupported"
-CAP_LCI_SUPPORTED = "lciSupported"
-CAP_LCR_SUPPORTED = "lcrSupported"
-CAP_RESPONDER_SUPPORTED = "responderSupported"
-CAP_MC_VERSION = "mcVersion"
diff --git a/src/antlion/test_utils/wifi/rtt/rtt_test_utils.py b/src/antlion/test_utils/wifi/rtt/rtt_test_utils.py
deleted file mode 100644
index a0777d8..0000000
--- a/src/antlion/test_utils/wifi/rtt/rtt_test_utils.py
+++ /dev/null
@@ -1,501 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import queue
-import statistics
-import time
-
-from antlion import asserts
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.rtt import rtt_const as rconsts
-
-# arbitrary timeout for events
-EVENT_TIMEOUT = 15
-
-
-def decorate_event(event_name, id):
-    return '%s_%d' % (event_name, id)
-
-
-def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for the specified event or timeout.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-  Returns:
-    The event (if available)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-        return event
-    except queue.Empty:
-        ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
-        asserts.fail(event_name)
-
-
-def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for a timeout period and looks for the specified event - fails if it
-  is observed.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait for (and fail on its appearance)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
-                    event['data'])
-        asserts.fail(event_name, extras=event)
-    except queue.Empty:
-        ad.log.info('%s%s not seen (as expected)', prefix, event_name)
-        return
-
-
-def get_rtt_capabilities(ad):
-    """Get the Wi-Fi RTT capabilities from the specified device. The
-  capabilities are a dictionary keyed by rtt_const.CAP_* keys.
-
-  Args:
-    ad: the Android device
-  Returns: the capability dictionary.
-  """
-    return json.loads(ad.adb.shell('cmd wifirtt get_capabilities'))
-
-
-def config_privilege_override(dut, override_to_no_privilege):
-    """Configure the device to override the permission check and to disallow any
-  privileged RTT operations, e.g. disallow one-sided RTT to Responders (APs)
-  which do not support IEEE 802.11mc.
-
-  Args:
-    dut: Device to configure.
-    override_to_no_privilege: True to indicate no privileged ops, False for
-                              default (which will allow privileged ops).
-  """
-    dut.adb.shell("cmd wifirtt set override_assume_no_privilege %d" %
-                  (1 if override_to_no_privilege else 0))
-
-
-def get_rtt_constrained_results(scanned_networks, support_rtt):
-    """Filter the input list and only return those networks which either support
-  or do not support RTT (IEEE 802.11mc.)
-
-  Args:
-    scanned_networks: A list of networks from scan results.
-    support_rtt: True - only return those APs which support RTT, False - only
-                 return those APs which do not support RTT.
-
-  Returns: a sub-set of the scanned_networks per support_rtt constraint.
-  """
-    matching_networks = []
-    for network in scanned_networks:
-        if support_rtt:
-            if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in network
-                    and network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-                matching_networks.append(network)
-        else:
-            if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER not in network
-                    or not network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-                matching_networks.append(network)
-
-    return matching_networks
-
-
-def scan_networks(dut, max_tries=3):
-    """Perform a scan and return scan results.
-
-  Args:
-    dut: Device under test.
-    max_tries: Maximum number of scan attempts to ensure networks are found
-
-  Returns: an array of scan results.
-  """
-    scan_results = []
-    for num_tries in range(max_tries):
-        wutils.start_wifi_connection_scan(dut)
-        scan_results = dut.droid.wifiGetScanResults()
-        if scan_results:
-            break
-    return scan_results
-
-
-def scan_with_rtt_support_constraint(dut, support_rtt, repeat=0):
-    """Perform a scan and return scan results of APs: only those that support or
-  do not support RTT (IEEE 802.11mc) - per the support_rtt parameter.
-
-  Args:
-    dut: Device under test.
-    support_rtt: True - only return those APs which support RTT, False - only
-                 return those APs which do not support RTT.
-    repeat: Re-scan this many times to find an RTT supporting network.
-
-  Returns: an array of scan results.
-  """
-    for i in range(repeat + 1):
-        scan_results = scan_networks(dut)
-        aps = get_rtt_constrained_results(scan_results, support_rtt)
-        if len(aps) != 0:
-            return aps
-
-    return []
-
-
-def select_best_scan_results(scans, select_count, lowest_rssi=-80):
-    """Select the strongest 'select_count' scans in the input list based on
-  highest RSSI. Exclude all very weak signals, even if results in a shorter
-  list.
-
-  Args:
-    scans: List of scan results.
-    select_count: An integer specifying how many scans to return at most.
-    lowest_rssi: The lowest RSSI to accept into the output.
-  Returns: a list of the strongest 'select_count' scan results from the scans
-           list.
-  """
-
-    def takeRssi(element):
-        return element['level']
-
-    result = []
-    scans.sort(key=takeRssi, reverse=True)
-    for scan in scans:
-        if len(result) == select_count:
-            break
-        if scan['level'] < lowest_rssi:
-            break  # rest are lower since we're sorted
-        result.append(scan)
-
-    return result
-
-
-def validate_ap_result(scan_result, range_result):
-    """Validate the range results:
-  - Successful if the AP (per scan result) supports 802.11mc (allowed to fail
-    otherwise)
-  - MAC of result matches the BSSID
-
-  Args:
-    scan_result: Scan result for the AP
-    range_result: Range result returned by the RTT API
-  """
-    asserts.assert_equal(
-        scan_result[wutils.WifiEnums.BSSID_KEY],
-        range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID],
-        'MAC/BSSID mismatch')
-    if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scan_result
-            and scan_result[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-        asserts.assert_true(
-            range_result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
-            rconsts.EVENT_CB_RANGING_STATUS_SUCCESS,
-            'Ranging failed for an AP which supports 802.11mc!')
-
-
-def validate_ap_results(scan_results, range_results):
-    """Validate an array of ranging results against the scan results used to
-  trigger the range. The assumption is that the results are returned in the
-  same order as the request (which were the scan results).
-
-  Args:
-    scan_results: Scans results used to trigger the range request
-    range_results: Range results returned by the RTT API
-  """
-    asserts.assert_equal(
-        len(scan_results), len(range_results),
-        'Mismatch in length of scan results and range results')
-
-    # sort first based on BSSID/MAC
-    scan_results.sort(key=lambda x: x[wutils.WifiEnums.BSSID_KEY])
-    range_results.sort(
-        key=lambda x: x[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID])
-
-    for i in range(len(scan_results)):
-        validate_ap_result(scan_results[i], range_results[i])
-
-
-def validate_aware_mac_result(range_result, mac, description):
-    """Validate the range result for an Aware peer specified with a MAC address:
-  - Correct MAC address.
-
-  The MAC addresses may contain ":" (which are ignored for the comparison) and
-  may be in any case (which is ignored for the comparison).
-
-  Args:
-    range_result: Range result returned by the RTT API
-    mac: MAC address of the peer
-    description: Additional content to print on failure
-  """
-    mac1 = mac.replace(':', '').lower()
-    mac2 = range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING].replace(
-        ':', '').lower()
-    asserts.assert_equal(mac1, mac2, '%s: MAC mismatch' % description)
-
-
-def validate_aware_peer_id_result(range_result, peer_id, description):
-    """Validate the range result for An Aware peer specified with a Peer ID:
-  - Correct Peer ID
-  - MAC address information not available
-
-  Args:
-    range_result: Range result returned by the RTT API
-    peer_id: Peer ID of the peer
-    description: Additional content to print on failure
-  """
-    asserts.assert_equal(peer_id,
-                         range_result[rconsts.EVENT_CB_RANGING_KEY_PEER_ID],
-                         '%s: Peer Id mismatch' % description)
-    asserts.assert_false(rconsts.EVENT_CB_RANGING_KEY_MAC in range_result,
-                         '%s: MAC Address not empty!' % description)
-
-
-def extract_stats(results,
-                  range_reference_mm,
-                  range_margin_mm,
-                  min_rssi,
-                  reference_lci=[],
-                  reference_lcr=[],
-                  summary_only=False):
-    """Extract statistics from a list of RTT results. Returns a dictionary
-   with results:
-     - num_results (success or fails)
-     - num_success_results
-     - num_no_results (e.g. timeout)
-     - num_failures
-     - num_range_out_of_margin (only for successes)
-     - num_invalid_rssi (only for successes)
-     - distances: extracted list of distances
-     - distance_std_devs: extracted list of distance standard-deviations
-     - rssis: extracted list of RSSI
-     - distance_mean
-     - distance_std_dev (based on distance - ignoring the individual std-devs)
-     - rssi_mean
-     - rssi_std_dev
-     - status_codes
-     - lcis: extracted list of all of the individual LCI
-     - lcrs: extracted list of all of the individual LCR
-     - any_lci_mismatch: True/False - checks if all LCI results are identical to
-                         the reference LCI.
-     - any_lcr_mismatch: True/False - checks if all LCR results are identical to
-                         the reference LCR.
-     - num_attempted_measurements: extracted list of all of the individual
-                                   number of attempted measurements.
-     - num_successful_measurements: extracted list of all of the individual
-                                    number of successful measurements.
-     - invalid_num_attempted: True/False - checks if number of attempted
-                              measurements is non-zero for successful results.
-     - invalid_num_successful: True/False - checks if number of successful
-                               measurements is non-zero for successful results.
-
-  Args:
-    results: List of RTT results.
-    range_reference_mm: Reference value for the distance (in mm)
-    range_margin_mm: Acceptable absolute margin for distance (in mm)
-    min_rssi: Acceptable minimum RSSI value.
-    reference_lci, reference_lcr: Reference values for LCI and LCR.
-    summary_only: Only include summary keys (reduce size).
-
-  Returns: A dictionary of stats.
-  """
-    stats = {}
-    stats['num_results'] = 0
-    stats['num_success_results'] = 0
-    stats['num_no_results'] = 0
-    stats['num_failures'] = 0
-    stats['num_range_out_of_margin'] = 0
-    stats['num_invalid_rssi'] = 0
-    stats['any_lci_mismatch'] = False
-    stats['any_lcr_mismatch'] = False
-    stats['invalid_num_attempted'] = False
-    stats['invalid_num_successful'] = False
-
-    range_max_mm = range_reference_mm + range_margin_mm
-    range_min_mm = range_reference_mm - range_margin_mm
-
-    distances = []
-    distance_std_devs = []
-    rssis = []
-    num_attempted_measurements = []
-    num_successful_measurements = []
-    status_codes = []
-    lcis = []
-    lcrs = []
-
-    for i in range(len(results)):
-        result = results[i]
-
-        if result is None:  # None -> timeout waiting for RTT result
-            stats['num_no_results'] = stats['num_no_results'] + 1
-            continue
-        stats['num_results'] = stats['num_results'] + 1
-
-        status_codes.append(result[rconsts.EVENT_CB_RANGING_KEY_STATUS])
-        if status_codes[-1] != rconsts.EVENT_CB_RANGING_STATUS_SUCCESS:
-            stats['num_failures'] = stats['num_failures'] + 1
-            continue
-        stats['num_success_results'] = stats['num_success_results'] + 1
-
-        distance_mm = result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_MM]
-        distances.append(distance_mm)
-        if not range_min_mm <= distance_mm <= range_max_mm:
-            stats[
-                'num_range_out_of_margin'] = stats['num_range_out_of_margin'] + 1
-        distance_std_devs.append(
-            result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM])
-
-        rssi = result[rconsts.EVENT_CB_RANGING_KEY_RSSI]
-        rssis.append(rssi)
-        if not min_rssi <= rssi <= 0:
-            stats['num_invalid_rssi'] = stats['num_invalid_rssi'] + 1
-
-        num_attempted = result[
-            rconsts.EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS]
-        num_attempted_measurements.append(num_attempted)
-        if num_attempted == 0:
-            stats['invalid_num_attempted'] = True
-
-        num_successful = result[
-            rconsts.EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS]
-        num_successful_measurements.append(num_successful)
-        if num_successful == 0:
-            stats['invalid_num_successful'] = True
-
-        lcis.append(result[rconsts.EVENT_CB_RANGING_KEY_LCI])
-        if (result[rconsts.EVENT_CB_RANGING_KEY_LCI] != reference_lci):
-            stats['any_lci_mismatch'] = True
-        lcrs.append(result[rconsts.EVENT_CB_RANGING_KEY_LCR])
-        if (result[rconsts.EVENT_CB_RANGING_KEY_LCR] != reference_lcr):
-            stats['any_lcr_mismatch'] = True
-
-    if len(distances) > 0:
-        stats['distance_mean'] = statistics.mean(distances)
-    if len(distances) > 1:
-        stats['distance_std_dev'] = statistics.stdev(distances)
-    if len(rssis) > 0:
-        stats['rssi_mean'] = statistics.mean(rssis)
-    if len(rssis) > 1:
-        stats['rssi_std_dev'] = statistics.stdev(rssis)
-    if not summary_only:
-        stats['distances'] = distances
-        stats['distance_std_devs'] = distance_std_devs
-        stats['rssis'] = rssis
-        stats['num_attempted_measurements'] = num_attempted_measurements
-        stats['num_successful_measurements'] = num_successful_measurements
-        stats['status_codes'] = status_codes
-        stats['lcis'] = lcis
-        stats['lcrs'] = lcrs
-
-    return stats
-
-
-def run_ranging(dut,
-                aps,
-                iter_count,
-                time_between_iterations,
-                target_run_time_sec=0):
-    """Executing ranging to the set of APs.
-
-  Will execute a minimum of 'iter_count' iterations. Will continue to run
-  until execution time (just) exceeds 'target_run_time_sec'.
-
-  Args:
-    dut: Device under test
-    aps: A list of APs (Access Points) to range to.
-    iter_count: (Minimum) Number of measurements to perform.
-    time_between_iterations: Number of seconds to wait between iterations.
-    target_run_time_sec: The target run time in seconds.
-
-  Returns: a list of the events containing the RTT results (or None for a
-  failed measurement).
-  """
-    max_peers = dut.droid.wifiRttMaxPeersInRequest()
-
-    asserts.assert_true(len(aps) > 0, "Need at least one AP!")
-    if len(aps) > max_peers:
-        aps = aps[0:max_peers]
-
-    events = {}  # need to keep track per BSSID!
-    for ap in aps:
-        events[ap["BSSID"]] = []
-
-    start_clock = time.time()
-    iterations_done = 0
-    run_time = 0
-    while iterations_done < iter_count or (target_run_time_sec != 0
-                                           and run_time < target_run_time_sec):
-        if iterations_done != 0 and time_between_iterations != 0:
-            time.sleep(time_between_iterations)
-
-        id = dut.droid.wifiRttStartRangingToAccessPoints(aps)
-        try:
-            event = dut.ed.pop_event(
-                decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id),
-                EVENT_TIMEOUT)
-            range_results = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS]
-            asserts.assert_equal(
-                len(aps), len(range_results),
-                'Mismatch in length of scan results and range results')
-            for result in range_results:
-                bssid = result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING]
-                asserts.assert_true(
-                    bssid in events,
-                    "Result BSSID %s not in requested AP!?" % bssid)
-                asserts.assert_equal(
-                    len(events[bssid]), iterations_done,
-                    "Duplicate results for BSSID %s!?" % bssid)
-                events[bssid].append(result)
-        except queue.Empty:
-            for ap in aps:
-                events[ap["BSSID"]].append(None)
-
-        iterations_done = iterations_done + 1
-        run_time = time.time() - start_clock
-
-    return events
-
-
-def analyze_results(all_aps_events,
-                    rtt_reference_distance_mm,
-                    distance_margin_mm,
-                    min_expected_rssi,
-                    lci_reference,
-                    lcr_reference,
-                    summary_only=False):
-    """Verifies the results of the RTT experiment.
-
-  Args:
-    all_aps_events: Dictionary of APs, each a list of RTT result events.
-    rtt_reference_distance_mm: Expected distance to the AP (source of truth).
-    distance_margin_mm: Accepted error margin in distance measurement.
-    min_expected_rssi: Minimum acceptable RSSI value
-    lci_reference, lcr_reference: Expected LCI/LCR values (arrays of bytes).
-    summary_only: Only include summary keys (reduce size).
-  """
-    all_stats = {}
-    for bssid, events in all_aps_events.items():
-        stats = extract_stats(events, rtt_reference_distance_mm,
-                              distance_margin_mm, min_expected_rssi,
-                              lci_reference, lcr_reference, summary_only)
-        all_stats[bssid] = stats
-    return all_stats
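Taken together, the deleted helpers supported a scan, range, and analyze flow along these lines. This is a hedged sketch, not a test from the tree: `dut` is assumed to be an AndroidDevice supplied by the test class, `rutils` aliases this module, and the reference distance and margin values are placeholders.

```python
from antlion.test_utils.wifi.rtt import rtt_test_utils as rutils

# dut: AndroidDevice provided by the enclosing test class (assumption).
aps = rutils.scan_with_rtt_support_constraint(dut, support_rtt=True, repeat=3)
aps = rutils.select_best_scan_results(aps, select_count=2)
events = rutils.run_ranging(dut, aps, iter_count=10, time_between_iterations=1)
stats = rutils.analyze_results(
    events,
    rtt_reference_distance_mm=4500,  # placeholder ground-truth distance
    distance_margin_mm=2000,
    min_expected_rssi=-100,
    lci_reference=[],
    lcr_reference=[],
    summary_only=True,
)
```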
diff --git a/src/antlion/test_utils/wifi/wifi_constants.py b/src/antlion/test_utils/wifi/wifi_constants.py
index f07c987..a348f81 100644
--- a/src/antlion/test_utils/wifi/wifi_constants.py
+++ b/src/antlion/test_utils/wifi/wifi_constants.py
@@ -21,8 +21,12 @@
 WIFI_STATE_CHANGED = "WifiStateChanged"
 WIFI_FORGET_NW_SUCCESS = "WifiManagerForgetNetworkOnSuccess"
 WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH = "WifiManagerNetworkRequestMatchCallbackOnMatch"
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_SUCCESS = "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectSuccess"
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_FAILURE = "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectFailure"
+WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_SUCCESS = (
+    "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectSuccess"
+)
+WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_FAILURE = (
+    "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectFailure"
+)
 WIFI_NETWORK_CB_ON_AVAILABLE = "WifiManagerNetworkCallbackOnAvailable"
 WIFI_NETWORK_CB_ON_UNAVAILABLE = "WifiManagerNetworkCallbackOnUnavailable"
 WIFI_NETWORK_CB_ON_LOST = "WifiManagerNetworkCallbackOnLost"
@@ -31,8 +35,8 @@
 WIFI_SUBSYSTEM_RESTARTED = "WifiSubsystemRestarted"
 
 # These constants will be used by the ACTS wifi tests.
-CONNECT_BY_CONFIG_SUCCESS = 'WifiManagerConnectByConfigOnSuccess'
-CONNECT_BY_NETID_SUCCESS = 'WifiManagerConnectByNetIdOnSuccess'
+CONNECT_BY_CONFIG_SUCCESS = "WifiManagerConnectByConfigOnSuccess"
+CONNECT_BY_NETID_SUCCESS = "WifiManagerConnectByNetIdOnSuccess"
 
 # Softap related constants
 SOFTAP_CALLBACK_EVENT = "WifiManagerSoftApCallback-"
@@ -86,7 +90,7 @@
 SOFTAP_CAPABILITY_FEATURE_6GHZ = "6gSupported"
 SOFTAP_CAPABILITY_FEATURE_60GHZ = "60gSupported"
 
-DEFAULT_SOFTAP_TIMEOUT_S = 600 # 10 minutes
+DEFAULT_SOFTAP_TIMEOUT_S = 600  # 10 minutes
 
 # AP related constants
 AP_MAIN = "main_AP"
diff --git a/src/antlion/test_utils/wifi/wifi_datastore_utils.py b/src/antlion/test_utils/wifi/wifi_datastore_utils.py
deleted file mode 100755
index 0cd0820..0000000
--- a/src/antlion/test_utils/wifi/wifi_datastore_utils.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import requests
-
-from antlion import asserts
-from antlion import signals
-from antlion import utils
-from antlion.test_utils.wifi import wifi_constants
-"""This file consists of all the helper methods needed to interact with the
-   Datastore @ https://chaos-188802.appspot.com/ used for Android Interop
-   testing.
-"""
-
-DATASTORE_HOST = "https://chaos-188802.appspot.com"
-
-# The Datastore defines the following paths for operating methods.
-ADD_DEVICE = "devices/new"
-REMOVE_DEVICE = "devices/delete"
-LOCK_DEVICE = "devices/lock"
-UNLOCK_DEVICE = "devices/unlock"
-SHOW_DEVICE = "devices/"
-GET_DEVICES = "devices/"
-
-# HTTP content type. JSON encoded with UTF-8 character encoding.
-HTTP_HEADER = {'content-type': 'application/json'}
-
-
-def add_device(name, ap_label, lab_label):
-    """Add a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device.
-           ap_label: string, AP brand name.
-           lab_label: string, lab label for AP.
-       Returns:
-          True if device was added successfully; False otherwise.
-    """
-    request = DATASTORE_HOST + '/' + ADD_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.post(request,
-                             headers=HTTP_HEADER,
-                             data=json.dumps({
-                                 "hostname": name,
-                                 "ap_label": ap_label,
-                                 "lab_label": lab_label
-                             }))
-    if response.json()['result'] == 'success':
-        logging.info("Added device %s to datastore" % name)
-        return True
-    return False
-
-
-def remove_device(name):
-    """Delete a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device to delete.
-       Returns:
-           True if device was deleted successfully; False otherwise.
-    """
-    request = DATASTORE_HOST + '/' + REMOVE_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.put(request,
-                            headers=HTTP_HEADER,
-                            data=json.dumps({"hostname": name}))
-    result_str = "%s deleted." % name
-    if result_str in response.text:
-        logging.info("Removed device %s from datastore" % name)
-        return True
-    return False
-
-
-def lock_device(name, admin):
-    """Lock a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device in datastore.
-           admin: string, unique admin name for locking.
-      Returns:
-          True if operation was successful; False otherwise.
-    """
-    request = DATASTORE_HOST + '/' + LOCK_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.put(request,
-                            headers=HTTP_HEADER,
-                            data=json.dumps({
-                                "hostname": name,
-                                "locked_by": admin
-                            }))
-    if response.json()['result']:
-        logging.info("Locked device %s in datastore" % name)
-        return True
-    return False
-
-
-def unlock_device(name):
-    """Un-lock a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device in datastore.
-      Returns:
-          True if operation was successful; False otherwise.
-    """
-    request = DATASTORE_HOST + '/' + UNLOCK_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.put(request,
-                            headers=HTTP_HEADER,
-                            data=json.dumps({"hostname": name}))
-    if response.json()['result']:
-        logging.info("Finished un-locking AP %s in datastore" % name)
-        return True
-    return False
-
-
-def show_device(name):
-    """Show device properties for a given device(AP or Packet Capturer).
-
-       Args:
-           name: string, hostname of the device in datastore to fetch info.
-
-       Returns:
-           dict of device name:value properties if successful;
-           None otherwise.
-    """
-    request = DATASTORE_HOST + '/' + SHOW_DEVICE + name
-    logging.debug("Request = %s" % request)
-    response = requests.get(request)
-    if 'error' in response.text:
-        return None
-    return response.json()
-
-
-def get_devices():
-    """Get a list of all devices in the datastore.
-
-    Returns: dict of all devices' name:value properties if successful;
-             None otherwise.
-    """
-    request = DATASTORE_HOST + '/' + GET_DEVICES
-    logging.debug("Request = %s" % request)
-    response = requests.get(request)
-    if 'error' in response.text:
-        return None
-    return response.json()
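A hypothetical lock/inspect/unlock cycle with the helpers above (the hostname and admin label are placeholders, and the module is removed in this change):

```python
from antlion.test_utils.wifi import wifi_datastore_utils as dutils

AP_HOSTNAME = "chaos-ap-example"  # placeholder datastore hostname

if dutils.lock_device(AP_HOSTNAME, admin="interop-runner"):
    try:
        print(dutils.show_device(AP_HOSTNAME))
    finally:
        dutils.unlock_device(AP_HOSTNAME)
```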
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
index 26466a8..567077e 100644
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
+++ b/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
@@ -21,7 +21,8 @@
 import numpy
 import re
 import time
-from antlion import asserts
+from concurrent.futures import ThreadPoolExecutor
+
 from antlion import utils
 from antlion.controllers.android_device import AndroidDevice
 from antlion.controllers.utils_lib import ssh
@@ -30,22 +31,22 @@
 from antlion.test_utils.wifi.wifi_performance_test_utils import qcom_utils
 from antlion.test_utils.wifi.wifi_performance_test_utils import brcm_utils
 
-from concurrent.futures import ThreadPoolExecutor
+from mobly import asserts
 
 SHORT_SLEEP = 1
 MED_SLEEP = 6
-CHANNELS_6GHz = ['6g{}'.format(4 * x + 1) for x in range(59)]
+CHANNELS_6GHz = ["6g{}".format(4 * x + 1) for x in range(59)]
 BAND_TO_CHANNEL_MAP = {
-    '2.4GHz': list(range(1, 14)),
-    'UNII-1': [36, 40, 44, 48],
-    'UNII-2':
-    [52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 140],
-    'UNII-3': [149, 153, 157, 161, 165],
-    '6GHz': CHANNELS_6GHz
+    "2.4GHz": list(range(1, 14)),
+    "UNII-1": [36, 40, 44, 48],
+    "UNII-2": [52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 140],
+    "UNII-3": [149, 153, 157, 161, 165],
+    "6GHz": CHANNELS_6GHz,
 }
 CHANNEL_TO_BAND_MAP = {
     channel: band
-    for band, channels in BAND_TO_CHANNEL_MAP.items() for channel in channels
+    for band, channels in BAND_TO_CHANNEL_MAP.items()
+    for channel in channels
 }
 
 
@@ -64,27 +65,30 @@
 
 
 def detect_wifi_platform(dut):
-    if hasattr(dut, 'wifi_platform'):
+    if hasattr(dut, "wifi_platform"):
         return dut.wifi_platform
-    qcom_check = len(dut.get_file_names('/vendor/firmware/wlan/qca_cld/'))
+    qcom_check = len(dut.get_file_names("/vendor/firmware/wlan/qca_cld/"))
     if qcom_check:
-        dut.wifi_platform = 'qcom'
+        dut.wifi_platform = "qcom"
     else:
-        dut.wifi_platform = 'brcm'
+        dut.wifi_platform = "brcm"
     return dut.wifi_platform
 
 
 def detect_wifi_decorator(f):
     def wrap(*args, **kwargs):
-        if 'dut' in kwargs:
-            dut = kwargs['dut']
+        if "dut" in kwargs:
+            dut = kwargs["dut"]
         else:
             dut = next(arg for arg in args if type(arg) == AndroidDevice)
-        dut_package = 'acts_contrib.test_utils.wifi.wifi_performance_test_utils.{}_utils'.format(
-            detect_wifi_platform(dut))
+        dut_package = (
+            "acts_contrib.test_utils.wifi.wifi_performance_test_utils.{}_utils".format(
+                detect_wifi_platform(dut)
+            )
+        )
         dut_package = importlib.import_module(dut_package)
         f_decorated = getattr(dut_package, f.__name__, lambda: None)
-        return (f_decorated(*args, **kwargs))
+        return f_decorated(*args, **kwargs)
 
     return wrap
 
@@ -119,34 +123,34 @@
 
 
 def extract_sub_dict(full_dict, fields):
-    sub_dict = collections.OrderedDict(
-        (field, full_dict[field]) for field in fields)
+    sub_dict = collections.OrderedDict((field, full_dict[field]) for field in fields)
     return sub_dict
 
 
 # Miscellaneous Wifi Utilities
-def check_skip_conditions(testcase_params, dut, access_point,
-                          ota_chamber=None):
+def check_skip_conditions(testcase_params, dut, access_point, ota_chamber=None):
     """Checks if test should be skipped."""
     # Check battery level before test
     if not health_check(dut, 10):
-        asserts.skip('DUT battery level too low.')
-    if not access_point.band_lookup_by_channel(testcase_params['channel']):
-        asserts.skip('AP does not support requested channel.')
-    if ota_chamber and CHANNEL_TO_BAND_MAP[
-            testcase_params['channel']] not in ota_chamber.SUPPORTED_BANDS:
-        asserts.skip('OTA chamber does not support requested channel.')
+        asserts.skip("DUT battery level too low.")
+    if not access_point.band_lookup_by_channel(testcase_params["channel"]):
+        asserts.skip("AP does not support requested channel.")
+    if (
+        ota_chamber
+        and CHANNEL_TO_BAND_MAP[testcase_params["channel"]]
+        not in ota_chamber.SUPPORTED_BANDS
+    ):
+        asserts.skip("OTA chamber does not support requested channel.")
     # Check if 6GHz is supported by checking capabilities in the US.
     if not dut.droid.wifiCheckState():
         wutils.wifi_toggle_state(dut, True)
-    iw_list = dut.adb.shell('iw list')
-    supports_6ghz = '6135 MHz' in iw_list
-    supports_160mhz = 'Supported Channel Width: 160 MHz' in iw_list
-    if testcase_params.get('bandwidth', 20) == 160 and not supports_160mhz:
-        asserts.skip('DUT does not support 160 MHz networks.')
-    if testcase_params.get('channel',
-                           6) in CHANNELS_6GHz and not supports_6ghz:
-        asserts.skip('DUT does not support 6 GHz band.')
+    iw_list = dut.adb.shell("iw list")
+    supports_6ghz = "6135 MHz" in iw_list
+    supports_160mhz = "Supported Channel Width: 160 MHz" in iw_list
+    if testcase_params.get("bandwidth", 20) == 160 and not supports_160mhz:
+        asserts.skip("DUT does not support 160 MHz networks.")
+    if testcase_params.get("channel", 6) in CHANNELS_6GHz and not supports_6ghz:
+        asserts.skip("DUT does not support 6 GHz band.")
 
 
 def validate_network(dut, ssid):
@@ -162,7 +166,7 @@
     except:
         connected = False
         current_network = None
-    if connected and current_network['SSID'] == ssid:
+    if connected and current_network["SSID"] == ssid:
         return True
     else:
         return False
@@ -181,27 +185,27 @@
         dut_ip: string in ip address format, i.e., xxx.xxx.xxx.xxx
         subnet_mask: string representing subnet mask (public for global ip)
     """
-    ifconfig_out = ssh_connection.run('ifconfig').stdout
-    ip_list = re.findall('inet (?:addr:)?(\d+.\d+.\d+.\d+)', ifconfig_out)
+    ifconfig_out = ssh_connection.run("ifconfig").stdout
+    ip_list = re.findall("inet (?:addr:)?(\d+.\d+.\d+.\d+)", ifconfig_out)
     ip_list = [ipaddress.ip_address(ip) for ip in ip_list]
 
-    if subnet_mask == 'public':
+    if subnet_mask == "public":
         for ip in ip_list:
             # is_global is not used to allow for CGNAT ips in 100.x.y.z range
             if not ip.is_private:
                 return str(ip)
     else:
-        dut_network = ipaddress.ip_network('{}/{}'.format(dut_ip, subnet_mask),
-                                           strict=False)
+        dut_network = ipaddress.ip_network(
+            "{}/{}".format(dut_ip, subnet_mask), strict=False
+        )
         for ip in ip_list:
             if ip in dut_network:
                 return str(ip)
-    logging.error('No IP address found in requested subnet')
+    logging.error("No IP address found in requested subnet")
 
 
 # Ping utilities
-def get_ping_stats(src_device, dest_address, ping_duration, ping_interval,
-                   ping_size):
+def get_ping_stats(src_device, dest_address, ping_duration, ping_interval, ping_size):
     """Run ping to or from the DUT.
 
     The function either pings the DUT or pings a remote IP from
@@ -218,14 +222,14 @@
     """
     ping_count = int(ping_duration / ping_interval)
     ping_deadline = int(ping_count * ping_interval) + 1
-    ping_cmd_linux = 'ping -c {} -w {} -i {} -s {} -D'.format(
+    ping_cmd_linux = "ping -c {} -w {} -i {} -s {} -D".format(
         ping_count,
         ping_deadline,
         ping_interval,
         ping_size,
     )
 
-    ping_cmd_macos = 'ping -c {} -t {} -i {} -s {}'.format(
+    ping_cmd_macos = "ping -c {} -t {} -i {} -s {}".format(
         ping_count,
         ping_deadline,
         ping_interval,
@@ -233,48 +237,53 @@
     )
 
     if isinstance(src_device, AndroidDevice):
-        ping_cmd = '{} {}'.format(ping_cmd_linux, dest_address)
-        ping_output = src_device.adb.shell(ping_cmd,
-                                           timeout=ping_deadline + SHORT_SLEEP,
-                                           ignore_status=True)
+        ping_cmd = "{} {}".format(ping_cmd_linux, dest_address)
+        ping_output = src_device.adb.shell(
+            ping_cmd, timeout=ping_deadline + SHORT_SLEEP, ignore_status=True
+        )
     elif isinstance(src_device, ssh.connection.SshConnection):
-        platform = src_device.run('uname').stdout
-        if 'linux' in platform.lower():
-            ping_cmd = 'sudo {} {}'.format(ping_cmd_linux, dest_address)
-        elif 'darwin' in platform.lower():
+        platform = src_device.run("uname").stdout
+        if "linux" in platform.lower():
+            ping_cmd = "sudo {} {}".format(ping_cmd_linux, dest_address)
+        elif "darwin" in platform.lower():
             ping_cmd = "sudo {} {}| while IFS= read -r line; do printf '[%s] %s\n' \"$(gdate '+%s.%N')\" \"$line\"; done".format(
-                ping_cmd_macos, dest_address)
-        ping_output = src_device.run(ping_cmd,
-                                     timeout=ping_deadline + SHORT_SLEEP,
-                                     ignore_status=True).stdout
+                ping_cmd_macos, dest_address
+            )
+        ping_output = src_device.run(
+            ping_cmd, timeout=ping_deadline + SHORT_SLEEP, ignore_status=True
+        ).stdout
     else:
-        raise TypeError('Unable to ping using src_device of type %s.' %
-                        type(src_device))
+        raise TypeError(
+            "Unable to ping using src_device of type %s." % type(src_device)
+        )
     return ping_utils.PingResult(ping_output.splitlines())
 
 
 @nonblocking
-def get_ping_stats_nb(src_device, dest_address, ping_duration, ping_interval,
-                      ping_size):
-    return get_ping_stats(src_device, dest_address, ping_duration,
-                          ping_interval, ping_size)
+def get_ping_stats_nb(
+    src_device, dest_address, ping_duration, ping_interval, ping_size
+):
+    return get_ping_stats(
+        src_device, dest_address, ping_duration, ping_interval, ping_size
+    )
 
 
 # Iperf utilities
 @nonblocking
-def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag,
-                          timeout):
+def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag, timeout):
     return iperf_client.start(iperf_server_address, iperf_args, tag, timeout)
 
 
-def get_iperf_arg_string(duration,
-                         reverse_direction,
-                         interval=1,
-                         traffic_type='TCP',
-                         socket_size=None,
-                         num_processes=1,
-                         udp_throughput='1000M',
-                         ipv6=False):
+def get_iperf_arg_string(
+    duration,
+    reverse_direction,
+    interval=1,
+    traffic_type="TCP",
+    socket_size=None,
+    num_processes=1,
+    udp_throughput="1000M",
+    ipv6=False,
+):
     """Function to format iperf client arguments.
 
     This function takes in iperf client parameters and returns a properly
@@ -292,18 +301,19 @@
     Returns:
         iperf_args: string of formatted iperf args
     """
-    iperf_args = '-i {} -t {} -J '.format(interval, duration)
+    iperf_args = "-i {} -t {} -J ".format(interval, duration)
     if ipv6:
-        iperf_args = iperf_args + '-6 '
-    if traffic_type.upper() == 'UDP':
-        iperf_args = iperf_args + '-u -b {} -l 1470 -P {} '.format(
-            udp_throughput, num_processes)
-    elif traffic_type.upper() == 'TCP':
-        iperf_args = iperf_args + '-P {} '.format(num_processes)
+        iperf_args = iperf_args + "-6 "
+    if traffic_type.upper() == "UDP":
+        iperf_args = iperf_args + "-u -b {} -l 1470 -P {} ".format(
+            udp_throughput, num_processes
+        )
+    elif traffic_type.upper() == "TCP":
+        iperf_args = iperf_args + "-P {} ".format(num_processes)
     if socket_size:
-        iperf_args = iperf_args + '-w {} '.format(socket_size)
+        iperf_args = iperf_args + "-w {} ".format(socket_size)
     if reverse_direction:
-        iperf_args = iperf_args + ' -R'
+        iperf_args = iperf_args + " -R"
     return iperf_args
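As an illustration of the reformatted helper, the snippet below builds UDP downlink arguments and hands them to the non-blocking client starter defined earlier; `iperf_client` and the server address are assumed to come from the test's controller configuration.

```python
iperf_args = get_iperf_arg_string(
    duration=30,
    reverse_direction=True,   # adds -R: server sends, DUT receives
    traffic_type="UDP",
    udp_throughput="400M",
    num_processes=4,
)
result_future = start_iperf_client_nb(
    iperf_client, "192.0.2.1", iperf_args, tag="rvr", timeout=30 + MED_SLEEP
)
iperf_result = result_future.result()
```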
 
 
@@ -336,29 +346,34 @@
     Returns:
         target_atten: attenuation setting to achieve target_rssi
     """
-    logging.info('Searching attenuation for RSSI = {}dB'.format(target_rssi))
+    logging.info("Searching attenuation for RSSI = {}dB".format(target_rssi))
     # Set attenuator to 0 dB
     for atten in attenuators:
         atten.set_atten(0, strict=False, retry=True)
     # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
+    dut_ip = dut.droid.connectivityGetIPv4Addresses("wlan0")[0]
     # Measure starting RSSI
-    ping_future = get_ping_stats_nb(src_device=ping_server,
-                                    dest_address=dut_ip,
-                                    ping_duration=1.5,
-                                    ping_interval=0.02,
-                                    ping_size=64)
-    current_rssi = get_connected_rssi(dut,
-                                      num_measurements=4,
-                                      polling_frequency=0.25,
-                                      first_measurement_delay=0.5,
-                                      disconnect_warning=1,
-                                      ignore_samples=1)
-    current_rssi = current_rssi['signal_poll_rssi']['mean']
+    ping_future = get_ping_stats_nb(
+        src_device=ping_server,
+        dest_address=dut_ip,
+        ping_duration=1.5,
+        ping_interval=0.02,
+        ping_size=64,
+    )
+    current_rssi = get_connected_rssi(
+        dut,
+        num_measurements=4,
+        polling_frequency=0.25,
+        first_measurement_delay=0.5,
+        disconnect_warning=1,
+        ignore_samples=1,
+    )
+    current_rssi = current_rssi["signal_poll_rssi"]["mean"]
     ping_future.result()
     target_atten = 0
-    logging.debug('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
-        target_atten, current_rssi))
+    logging.debug(
+        "RSSI @ {0:.2f}dB attenuation = {1:.2f}".format(target_atten, current_rssi)
+    )
     within_range = 0
     for idx in range(20):
         atten_delta = max(min(current_rssi - target_rssi, 20), -20)
@@ -369,27 +384,34 @@
             return attenuators[0].get_max_atten()
         for atten in attenuators:
             atten.set_atten(target_atten, strict=False, retry=True)
-        ping_future = get_ping_stats_nb(src_device=ping_server,
-                                        dest_address=dut_ip,
-                                        ping_duration=1.5,
-                                        ping_interval=0.02,
-                                        ping_size=64)
-        current_rssi = get_connected_rssi(dut,
-                                          num_measurements=4,
-                                          polling_frequency=0.25,
-                                          first_measurement_delay=0.5,
-                                          disconnect_warning=1,
-                                          ignore_samples=1)
-        current_rssi = current_rssi['signal_poll_rssi']['mean']
+        ping_future = get_ping_stats_nb(
+            src_device=ping_server,
+            dest_address=dut_ip,
+            ping_duration=1.5,
+            ping_interval=0.02,
+            ping_size=64,
+        )
+        current_rssi = get_connected_rssi(
+            dut,
+            num_measurements=4,
+            polling_frequency=0.25,
+            first_measurement_delay=0.5,
+            disconnect_warning=1,
+            ignore_samples=1,
+        )
+        current_rssi = current_rssi["signal_poll_rssi"]["mean"]
         ping_future.result()
-        logging.info('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
-            target_atten, current_rssi))
+        logging.info(
+            "RSSI @ {0:.2f}dB attenuation = {1:.2f}".format(target_atten, current_rssi)
+        )
         if abs(current_rssi - target_rssi) < 1:
             if within_range:
                 logging.info(
-                    'Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}.'
-                    'Attenuation: {2:.2f}, Iterations = {3:.2f}'.format(
-                        current_rssi, target_rssi, target_atten, idx))
+                    "Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}."
+                    "Attenuation: {2:.2f}, Iterations = {3:.2f}".format(
+                        current_rssi, target_rssi, target_atten, idx
+                    )
+                )
                 return target_atten
             else:
                 within_range = True
@@ -398,10 +420,7 @@
     return target_atten
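The loop above is essentially a clamped proportional search. Below is a standalone sketch of the same idea under stated assumptions: a hypothetical `measure_rssi()` callback stands in for the ping-plus-`get_connected_rssi()` measurement, and the maximum attenuation is an arbitrary placeholder.

```python
def search_attenuation(measure_rssi, target_rssi, max_atten=95.0, max_steps=20):
    """Step attenuation by the clamped RSSI error until within 1 dB twice."""
    atten = 0.0
    within_range = False
    for _ in range(max_steps):
        rssi = measure_rssi(atten)
        if abs(rssi - target_rssi) < 1:
            if within_range:  # two consecutive in-range readings: converged
                return atten
            within_range = True
        else:
            within_range = False
        step = max(min(rssi - target_rssi, 20), -20)  # clamp step to +/-20 dB
        atten = min(max(atten + step, 0.0), max_atten)
    return atten
```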
 
 
-def get_current_atten_dut_chain_map(attenuators,
-                                    dut,
-                                    ping_server,
-                                    ping_from_dut=False):
+def get_current_atten_dut_chain_map(attenuators, dut, ping_server, ping_from_dut=False):
     """Function to detect mapping between attenuator ports and DUT chains.
 
     This function detects the mapping between attenuator ports and DUT chains
@@ -423,18 +442,19 @@
     for atten in attenuators:
         atten.set_atten(0, strict=False, retry=True)
     # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
+    dut_ip = dut.droid.connectivityGetIPv4Addresses("wlan0")[0]
     if ping_from_dut:
-        ping_future = get_ping_stats_nb(dut, ping_server._settings.hostname,
-                                        11, 0.02, 64)
+        ping_future = get_ping_stats_nb(
+            dut, ping_server._settings.hostname, 11, 0.02, 64
+        )
     else:
         ping_future = get_ping_stats_nb(ping_server, dut_ip, 11, 0.02, 64)
     # Measure starting RSSI
     base_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-    chain0_base_rssi = base_rssi['chain_0_rssi']['mean']
-    chain1_base_rssi = base_rssi['chain_1_rssi']['mean']
+    chain0_base_rssi = base_rssi["chain_0_rssi"]["mean"]
+    chain1_base_rssi = base_rssi["chain_1_rssi"]["mean"]
     if chain0_base_rssi < -70 or chain1_base_rssi < -70:
-        logging.warning('RSSI might be too low to get reliable chain map.')
+        logging.warning("RSSI might be too low to get reliable chain map.")
     # Compile chain map by attenuating one path at a time and seeing which
     # chain's RSSI degrades
     chain_map = []
@@ -444,26 +464,28 @@
         # Get new RSSI
         test_rssi = get_connected_rssi(dut, 4, 0.25, 1)
         # Assign attenuator to path that has lower RSSI
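+        # A port is attributed to a chain when attenuating it drops that chain's RSSI
+        # by more than 10 dB; baselines below -70 dBm are considered unreliable.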
-        if chain0_base_rssi > -70 and chain0_base_rssi - test_rssi[
-                'chain_0_rssi']['mean'] > 10:
-            chain_map.append('DUT-Chain-0')
-        elif chain1_base_rssi > -70 and chain1_base_rssi - test_rssi[
-                'chain_1_rssi']['mean'] > 10:
-            chain_map.append('DUT-Chain-1')
+        if (
+            chain0_base_rssi > -70
+            and chain0_base_rssi - test_rssi["chain_0_rssi"]["mean"] > 10
+        ):
+            chain_map.append("DUT-Chain-0")
+        elif (
+            chain1_base_rssi > -70
+            and chain1_base_rssi - test_rssi["chain_1_rssi"]["mean"] > 10
+        ):
+            chain_map.append("DUT-Chain-1")
         else:
             chain_map.append(None)
         # Reset attenuator to 0
         test_atten.set_atten(0, strict=False, retry=True)
     ping_future.result()
-    logging.debug('Chain Map: {}'.format(chain_map))
+    logging.debug("Chain Map: {}".format(chain_map))
     return chain_map
 
 
-def get_full_rf_connection_map(attenuators,
-                               dut,
-                               ping_server,
-                               networks,
-                               ping_from_dut=False):
+def get_full_rf_connection_map(
+    attenuators, dut, ping_server, networks, ping_from_dut=False
+):
     """Function to detect per-network connections between attenuator and DUT.
 
     This function detects the mapping between attenuator ports and DUT chains
@@ -489,21 +511,21 @@
     rf_map_by_atten = [[] for atten in attenuators]
     for net_id, net_config in networks.items():
         wutils.reset_wifi(dut)
-        wutils.wifi_connect(dut,
-                            net_config,
-                            num_of_tries=1,
-                            assert_on_fail=False,
-                            check_connectivity=False)
+        wutils.wifi_connect(
+            dut,
+            net_config,
+            num_of_tries=1,
+            assert_on_fail=False,
+            check_connectivity=False,
+        )
         rf_map_by_network[net_id] = get_current_atten_dut_chain_map(
-            attenuators, dut, ping_server, ping_from_dut)
+            attenuators, dut, ping_server, ping_from_dut
+        )
         for idx, chain in enumerate(rf_map_by_network[net_id]):
             if chain:
-                rf_map_by_atten[idx].append({
-                    'network': net_id,
-                    'dut_chain': chain
-                })
-    logging.debug('RF Map (by Network): {}'.format(rf_map_by_network))
-    logging.debug('RF Map (by Atten): {}'.format(rf_map_by_atten))
+                rf_map_by_atten[idx].append({"network": net_id, "dut_chain": chain})
+    logging.debug("RF Map (by Network): {}".format(rf_map_by_network))
+    logging.debug("RF Map (by Atten): {}".format(rf_map_by_atten))
 
     return rf_map_by_network, rf_map_by_atten
 
@@ -521,20 +543,20 @@
         temperature: device temperature. 0 if temperature could not be read
     """
     candidate_zones = [
-        '/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/back_therm/temp',
-        '/dev/thermal/tz-by-name/quiet_therm/temp'
+        "/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp",
+        "/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp",
+        "/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp",
+        "/sys/devices/virtual/thermal/tz-by-name/back_therm/temp",
+        "/dev/thermal/tz-by-name/quiet_therm/temp",
     ]
     for zone in candidate_zones:
         try:
-            temperature = int(dut.adb.shell('cat {}'.format(zone)))
+            temperature = int(dut.adb.shell("cat {}".format(zone)))
             break
         except:
             temperature = 0
     if temperature == 0:
-        logging.debug('Could not check DUT temperature.')
+        logging.debug("Could not check DUT temperature.")
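+    # Zones reporting millidegrees Celsius (values above 100) are scaled to degrees.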
     elif temperature > 100:
         temperature = temperature / 1000
     return temperature
@@ -555,8 +577,11 @@
             break
         time.sleep(SHORT_SLEEP)
     elapsed_time = time.time() - start_time
-    logging.debug('DUT Final Temperature: {}C. Cooldown duration: {}'.format(
-        temperature, elapsed_time))
+    logging.debug(
+        "DUT Final Temperature: {}C. Cooldown duration: {}".format(
+            temperature, elapsed_time
+        )
+    )
 
 
 def health_check(dut, batt_thresh=5, temp_threshold=53, cooldown=1):
@@ -576,52 +601,62 @@
     health_check = True
     battery_level = utils.get_battery_level(dut)
     if battery_level < batt_thresh:
-        logging.warning('Battery level low ({}%)'.format(battery_level))
+        logging.warning("Battery level low ({}%)".format(battery_level))
         health_check = False
     else:
-        logging.debug('Battery level = {}%'.format(battery_level))
+        logging.debug("Battery level = {}%".format(battery_level))
 
     temperature = get_dut_temperature(dut)
     if temperature > temp_threshold:
         if cooldown:
-            logging.warning(
-                'Waiting for DUT to cooldown. ({} C)'.format(temperature))
+            logging.warning("Waiting for DUT to cool down. ({} C)".format(temperature))
             wait_for_dut_cooldown(dut, target_temp=temp_threshold - 5)
         else:
-            logging.warning('DUT Overheating ({} C)'.format(temperature))
+            logging.warning("DUT Overheating ({} C)".format(temperature))
             health_check = False
     else:
-        logging.debug('DUT Temperature = {} C'.format(temperature))
+        logging.debug("DUT Temperature = {} C".format(temperature))
     return health_check
 
 
 # Wifi Device Utils
 def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', float('nan')),
-                                    ('stdev', float('nan'))])
+    return collections.OrderedDict(
+        [("data", []), ("mean", float("nan")), ("stdev", float("nan"))]
+    )
 
 
 @nonblocking
-def get_connected_rssi_nb(dut,
-                          num_measurements=1,
-                          polling_frequency=SHORT_SLEEP,
-                          first_measurement_delay=0,
-                          disconnect_warning=True,
-                          ignore_samples=0,
-                          interface='wlan0'):
-    return get_connected_rssi(dut, num_measurements, polling_frequency,
-                              first_measurement_delay, disconnect_warning,
-                              ignore_samples, interface)
+def get_connected_rssi_nb(
+    dut,
+    num_measurements=1,
+    polling_frequency=SHORT_SLEEP,
+    first_measurement_delay=0,
+    disconnect_warning=True,
+    ignore_samples=0,
+    interface="wlan0",
+):
+    return get_connected_rssi(
+        dut,
+        num_measurements,
+        polling_frequency,
+        first_measurement_delay,
+        disconnect_warning,
+        ignore_samples,
+        interface,
+    )
 
 
 @detect_wifi_decorator
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface='wlan0'):
+def get_connected_rssi(
+    dut,
+    num_measurements=1,
+    polling_frequency=SHORT_SLEEP,
+    first_measurement_delay=0,
+    disconnect_warning=True,
+    ignore_samples=0,
+    interface="wlan0",
+):
     """Gets all RSSI values reported for the connected access point/BSSID.
 
     Args:
@@ -727,9 +762,9 @@
 
 
 # Link layer stats utilities
-class LinkLayerStats():
+class LinkLayerStats:
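+    # Factory: returns the qcom_utils or brcm_utils implementation based on the
+    # detected Wi-Fi platform.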
     def __new__(self, dut, llstats_enabled=True):
-        if detect_wifi_platform(dut) == 'qcom':
+        if detect_wifi_platform(dut) == "qcom":
             return qcom_utils.LinkLayerStats(dut, llstats_enabled)
         else:
             return brcm_utils.LinkLayerStats(dut, llstats_enabled)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
index d6c5e39..0c9aec3 100644
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
+++ b/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
@@ -22,103 +22,115 @@
 
 
 # Plotting Utilities
-class BokehFigure():
+class BokehFigure:
     """Class enabling simplified Bokeh plotting."""
 
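+    # Example usage (illustrative; argument values are hypothetical):
+    #   figure = BokehFigure(title="RSSI vs. Attenuation",
+    #                        x_label="Attenuation (dB)",
+    #                        primary_y_label="RSSI (dBm)")
+    #   figure.add_line(atten_levels, rssi_values, legend="Chain 0")
+    #   figure.generate_figure("rssi_plot.html")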
     COLORS = [
-        'black',
-        'blue',
-        'blueviolet',
-        'brown',
-        'burlywood',
-        'cadetblue',
-        'cornflowerblue',
-        'crimson',
-        'cyan',
-        'darkblue',
-        'darkgreen',
-        'darkmagenta',
-        'darkorange',
-        'darkred',
-        'deepskyblue',
-        'goldenrod',
-        'green',
-        'grey',
-        'indigo',
-        'navy',
-        'olive',
-        'orange',
-        'red',
-        'salmon',
-        'teal',
-        'yellow',
+        "black",
+        "blue",
+        "blueviolet",
+        "brown",
+        "burlywood",
+        "cadetblue",
+        "cornflowerblue",
+        "crimson",
+        "cyan",
+        "darkblue",
+        "darkgreen",
+        "darkmagenta",
+        "darkorange",
+        "darkred",
+        "deepskyblue",
+        "goldenrod",
+        "green",
+        "grey",
+        "indigo",
+        "navy",
+        "olive",
+        "orange",
+        "red",
+        "salmon",
+        "teal",
+        "yellow",
     ]
     MARKERS = [
-        'asterisk', 'circle', 'circle_cross', 'circle_x', 'cross', 'diamond',
-        'diamond_cross', 'hex', 'inverted_triangle', 'square', 'square_x',
-        'square_cross', 'triangle', 'x'
+        "asterisk",
+        "circle",
+        "circle_cross",
+        "circle_x",
+        "cross",
+        "diamond",
+        "diamond_cross",
+        "hex",
+        "inverted_triangle",
+        "square",
+        "square_x",
+        "square_cross",
+        "triangle",
+        "x",
     ]
 
-    TOOLS = ('box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save')
+    TOOLS = "box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save"
 
-    def __init__(self,
-                 title=None,
-                 x_label=None,
-                 primary_y_label=None,
-                 secondary_y_label=None,
-                 height=700,
-                 width=1100,
-                 title_size='15pt',
-                 axis_label_size='12pt',
-                 legend_label_size='12pt',
-                 axis_tick_label_size='12pt',
-                 x_axis_type='auto',
-                 sizing_mode='scale_both',
-                 json_file=None):
+    def __init__(
+        self,
+        title=None,
+        x_label=None,
+        primary_y_label=None,
+        secondary_y_label=None,
+        height=700,
+        width=1100,
+        title_size="15pt",
+        axis_label_size="12pt",
+        legend_label_size="12pt",
+        axis_tick_label_size="12pt",
+        x_axis_type="auto",
+        sizing_mode="scale_both",
+        json_file=None,
+    ):
         if json_file:
             self.load_from_json(json_file)
         else:
             self.figure_data = []
             self.fig_property = {
-                'title': title,
-                'x_label': x_label,
-                'primary_y_label': primary_y_label,
-                'secondary_y_label': secondary_y_label,
-                'num_lines': 0,
-                'height': height,
-                'width': width,
-                'title_size': title_size,
-                'axis_label_size': axis_label_size,
-                'legend_label_size': legend_label_size,
-                'axis_tick_label_size': axis_tick_label_size,
-                'x_axis_type': x_axis_type,
-                'sizing_mode': sizing_mode
+                "title": title,
+                "x_label": x_label,
+                "primary_y_label": primary_y_label,
+                "secondary_y_label": secondary_y_label,
+                "num_lines": 0,
+                "height": height,
+                "width": width,
+                "title_size": title_size,
+                "axis_label_size": axis_label_size,
+                "legend_label_size": legend_label_size,
+                "axis_tick_label_size": axis_tick_label_size,
+                "x_axis_type": x_axis_type,
+                "sizing_mode": sizing_mode,
             }
 
     def init_plot(self):
         self.plot = bokeh.plotting.figure(
-            sizing_mode=self.fig_property['sizing_mode'],
-            plot_width=self.fig_property['width'],
-            plot_height=self.fig_property['height'],
-            title=self.fig_property['title'],
+            sizing_mode=self.fig_property["sizing_mode"],
+            plot_width=self.fig_property["width"],
+            plot_height=self.fig_property["height"],
+            title=self.fig_property["title"],
             tools=self.TOOLS,
-            x_axis_type=self.fig_property['x_axis_type'],
-            output_backend='webgl')
+            x_axis_type=self.fig_property["x_axis_type"],
+            output_backend="webgl",
+        )
         tooltips = [
-            ('index', '$index'),
-            ('(x,y)', '($x, $y)'),
+            ("index", "$index"),
+            ("(x,y)", "($x, $y)"),
         ]
         hover_set = []
         for line in self.figure_data:
-            hover_set.extend(line['hover_text'].keys())
+            hover_set.extend(line["hover_text"].keys())
         hover_set = set(hover_set)
         for item in hover_set:
-            tooltips.append((item, '@{}'.format(item)))
+            tooltips.append((item, "@{}".format(item)))
         self.plot.hover.tooltips = tooltips
-        self.plot.add_tools(
-            bokeh.models.tools.WheelZoomTool(dimensions='width'))
-        self.plot.add_tools(
-            bokeh.models.tools.WheelZoomTool(dimensions='height'))
+        self.plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions="width"))
+        self.plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions="height"))
 
     def _filter_line(self, x_data, y_data, hover_text=None):
         """Function to remove NaN points from bokeh plots."""
@@ -126,7 +138,8 @@
         y_data_filtered = []
         hover_text_filtered = {}
         for idx, xy in enumerate(
-                itertools.zip_longest(x_data, y_data, fillvalue=float('nan'))):
+            itertools.zip_longest(x_data, y_data, fillvalue=float("nan"))
+        ):
             if not math.isnan(xy[1]):
                 x_data_filtered.append(xy[0])
                 y_data_filtered.append(xy[1])
@@ -134,21 +147,24 @@
                     for key, value in hover_text.items():
                         hover_text_filtered.setdefault(key, [])
                         hover_text_filtered[key].append(
-                            value[idx] if len(value) > idx else '')
+                            value[idx] if len(value) > idx else ""
+                        )
         return x_data_filtered, y_data_filtered, hover_text_filtered
 
-    def add_line(self,
-                 x_data,
-                 y_data,
-                 legend,
-                 hover_text=None,
-                 color=None,
-                 width=3,
-                 style='solid',
-                 marker=None,
-                 marker_size=10,
-                 shaded_region=None,
-                 y_axis='default'):
+    def add_line(
+        self,
+        x_data,
+        y_data,
+        legend,
+        hover_text=None,
+        color=None,
+        width=3,
+        style="solid",
+        marker=None,
+        marker_size=10,
+        shaded_region=None,
+        y_axis="default",
+    ):
         """Function to add line to existing BokehFigure.
 
         Args:
@@ -163,41 +179,45 @@
             shaded_region: data describing shaded region to plot
             y_axis: identifier for y-axis to plot line against
         """
-        if y_axis not in ['default', 'secondary']:
-            raise ValueError('y_axis must be default or secondary')
+        if y_axis not in ["default", "secondary"]:
+            raise ValueError("y_axis must be default or secondary")
         if color == None:
-            color = self.COLORS[self.fig_property['num_lines'] %
-                                len(self.COLORS)]
-        if style == 'dashed':
+            color = self.COLORS[self.fig_property["num_lines"] % len(self.COLORS)]
+        if style == "dashed":
             style = [5, 5]
         if isinstance(hover_text, list):
-            hover_text = {'info': hover_text}
+            hover_text = {"info": hover_text}
         x_data_filter, y_data_filter, hover_text_filter = self._filter_line(
-            x_data, y_data, hover_text)
-        self.figure_data.append({
-            'x_data': x_data_filter,
-            'y_data': y_data_filter,
-            'legend': legend,
-            'hover_text': hover_text_filter,
-            'color': color,
-            'width': width,
-            'style': style,
-            'marker': marker,
-            'marker_size': marker_size,
-            'shaded_region': shaded_region,
-            'y_axis': y_axis
-        })
-        self.fig_property['num_lines'] += 1
+            x_data, y_data, hover_text
+        )
+        self.figure_data.append(
+            {
+                "x_data": x_data_filter,
+                "y_data": y_data_filter,
+                "legend": legend,
+                "hover_text": hover_text_filter,
+                "color": color,
+                "width": width,
+                "style": style,
+                "marker": marker,
+                "marker_size": marker_size,
+                "shaded_region": shaded_region,
+                "y_axis": y_axis,
+            }
+        )
+        self.fig_property["num_lines"] += 1
 
-    def add_scatter(self,
-                    x_data,
-                    y_data,
-                    legend,
-                    hover_text=None,
-                    color=None,
-                    marker=None,
-                    marker_size=10,
-                    y_axis='default'):
+    def add_scatter(
+        self,
+        x_data,
+        y_data,
+        legend,
+        hover_text=None,
+        color=None,
+        marker=None,
+        marker_size=10,
+        y_axis="default",
+    ):
         """Function to add a scatter plot to an existing BokehFigure.
 
         Args:
@@ -209,28 +229,28 @@
             marker: string specifying marker, e.g., cross
             y_axis: identifier for y-axis to plot line against
         """
-        if y_axis not in ['default', 'secondary']:
-            raise ValueError('y_axis must be default or secondary')
+        if y_axis not in ["default", "secondary"]:
+            raise ValueError("y_axis must be default or secondary")
         if color == None:
-            color = self.COLORS[self.fig_property['num_lines'] %
-                                len(self.COLORS)]
+            color = self.COLORS[self.fig_property["num_lines"] % len(self.COLORS)]
         if marker == None:
-            marker = self.MARKERS[self.fig_property['num_lines'] %
-                                  len(self.MARKERS)]
-        self.figure_data.append({
-            'x_data': x_data,
-            'y_data': y_data,
-            'legend': legend,
-            'hover_text': hover_text,
-            'color': color,
-            'width': 0,
-            'style': 'solid',
-            'marker': marker,
-            'marker_size': marker_size,
-            'shaded_region': None,
-            'y_axis': y_axis
-        })
-        self.fig_property['num_lines'] += 1
+            marker = self.MARKERS[self.fig_property["num_lines"] % len(self.MARKERS)]
+        self.figure_data.append(
+            {
+                "x_data": x_data,
+                "y_data": y_data,
+                "legend": legend,
+                "hover_text": hover_text,
+                "color": color,
+                "width": 0,
+                "style": "solid",
+                "marker": marker,
+                "marker_size": marker_size,
+                "shaded_region": None,
+                "y_axis": y_axis,
+            }
+        )
+        self.fig_property["num_lines"] += 1
 
     def generate_figure(self, output_file=None, save_json=True):
         """Function to generate and save BokehFigure.
@@ -241,91 +261,99 @@
         self.init_plot()
         two_axes = False
         for line in self.figure_data:
-            data_dict = {'x': line['x_data'], 'y': line['y_data']}
-            for key, value in line['hover_text'].items():
+            data_dict = {"x": line["x_data"], "y": line["y_data"]}
+            for key, value in line["hover_text"].items():
                 data_dict[key] = value
             source = bokeh.models.ColumnDataSource(data=data_dict)
-            if line['width'] > 0:
-                self.plot.line(x='x',
-                               y='y',
-                               legend_label=line['legend'],
-                               line_width=line['width'],
-                               color=line['color'],
-                               line_dash=line['style'],
-                               name=line['y_axis'],
-                               y_range_name=line['y_axis'],
-                               source=source)
-            if line['shaded_region']:
-                band_x = line['shaded_region']['x_vector']
-                band_x.extend(line['shaded_region']['x_vector'][::-1])
-                band_y = line['shaded_region']['lower_limit']
-                band_y.extend(line['shaded_region']['upper_limit'][::-1])
-                self.plot.patch(band_x,
-                                band_y,
-                                color='#7570B3',
-                                line_alpha=0.1,
-                                fill_alpha=0.1)
-            if line['marker'] in self.MARKERS:
-                marker_func = getattr(self.plot, line['marker'])
-                marker_func(x='x',
-                            y='y',
-                            size=line['marker_size'],
-                            legend_label=line['legend'],
-                            line_color=line['color'],
-                            fill_color=line['color'],
-                            name=line['y_axis'],
-                            y_range_name=line['y_axis'],
-                            source=source)
-            if line['y_axis'] == 'secondary':
+            if line["width"] > 0:
+                self.plot.line(
+                    x="x",
+                    y="y",
+                    legend_label=line["legend"],
+                    line_width=line["width"],
+                    color=line["color"],
+                    line_dash=line["style"],
+                    name=line["y_axis"],
+                    y_range_name=line["y_axis"],
+                    source=source,
+                )
+            if line["shaded_region"]:
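+                # Trace a closed polygon: x values forward then reversed, lower limit
+                # followed by the reversed upper limit.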
+                band_x = line["shaded_region"]["x_vector"]
+                band_x.extend(line["shaded_region"]["x_vector"][::-1])
+                band_y = line["shaded_region"]["lower_limit"]
+                band_y.extend(line["shaded_region"]["upper_limit"][::-1])
+                self.plot.patch(
+                    band_x, band_y, color="#7570B3", line_alpha=0.1, fill_alpha=0.1
+                )
+            if line["marker"] in self.MARKERS:
+                marker_func = getattr(self.plot, line["marker"])
+                marker_func(
+                    x="x",
+                    y="y",
+                    size=line["marker_size"],
+                    legend_label=line["legend"],
+                    line_color=line["color"],
+                    fill_color=line["color"],
+                    name=line["y_axis"],
+                    y_range_name=line["y_axis"],
+                    source=source,
+                )
+            if line["y_axis"] == "secondary":
                 two_axes = True
 
-        #x-axis formatting
-        self.plot.xaxis.axis_label = self.fig_property['x_label']
+        # x-axis formatting
+        self.plot.xaxis.axis_label = self.fig_property["x_label"]
         self.plot.x_range.range_padding = 0
         self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[
-            'axis_label_size']
+            "axis_label_size"
+        ]
         self.plot.xaxis.major_label_text_font_size = self.fig_property[
-            'axis_tick_label_size']
-        #y-axis formatting
-        self.plot.yaxis[0].axis_label = self.fig_property['primary_y_label']
+            "axis_tick_label_size"
+        ]
+        # y-axis formatting
+        self.plot.yaxis[0].axis_label = self.fig_property["primary_y_label"]
         self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[
-            'axis_label_size']
+            "axis_label_size"
+        ]
         self.plot.yaxis.major_label_text_font_size = self.fig_property[
-            'axis_tick_label_size']
-        self.plot.y_range = bokeh.models.DataRange1d(names=['default'])
-        if two_axes and 'secondary' not in self.plot.extra_y_ranges:
+            "axis_tick_label_size"
+        ]
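+        # DataRange1d(names=...) auto-ranges only over glyphs with a matching name,
+        # keeping the default and secondary y-axes scaled independently.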
+        self.plot.y_range = bokeh.models.DataRange1d(names=["default"])
+        if two_axes and "secondary" not in self.plot.extra_y_ranges:
             self.plot.extra_y_ranges = {
-                'secondary': bokeh.models.DataRange1d(names=['secondary'])
+                "secondary": bokeh.models.DataRange1d(names=["secondary"])
             }
             self.plot.add_layout(
                 bokeh.models.LinearAxis(
-                    y_range_name='secondary',
-                    axis_label=self.fig_property['secondary_y_label'],
-                    axis_label_text_font_size=self.
-                    fig_property['axis_label_size']), 'right')
+                    y_range_name="secondary",
+                    axis_label=self.fig_property["secondary_y_label"],
+                    axis_label_text_font_size=self.fig_property["axis_label_size"],
+                ),
+                "right",
+            )
         # plot formatting
-        self.plot.legend.location = 'top_right'
-        self.plot.legend.click_policy = 'hide'
-        self.plot.title.text_font_size = self.fig_property['title_size']
-        self.plot.legend.label_text_font_size = self.fig_property[
-            'legend_label_size']
+        self.plot.legend.location = "top_right"
+        self.plot.legend.click_policy = "hide"
+        self.plot.title.text_font_size = self.fig_property["title_size"]
+        self.plot.legend.label_text_font_size = self.fig_property["legend_label_size"]
 
         if output_file is not None:
             self.save_figure(output_file, save_json)
         return self.plot
 
     def load_from_json(self, file_path):
-        with open(file_path, 'r') as json_file:
+        with open(file_path, "r") as json_file:
             fig_dict = json.load(json_file)
-        self.fig_property = fig_dict['fig_property']
-        self.figure_data = fig_dict['figure_data']
+        self.fig_property = fig_dict["fig_property"]
+        self.figure_data = fig_dict["figure_data"]
 
     def _save_figure_json(self, output_file):
         """Function to save a json format of a figure"""
-        figure_dict = collections.OrderedDict(fig_property=self.fig_property,
-                                              figure_data=self.figure_data)
-        output_file = output_file.replace('.html', '_plot_data.json')
-        with open(output_file, 'w') as outfile:
+        figure_dict = collections.OrderedDict(
+            fig_property=self.fig_property, figure_data=self.figure_data
+        )
+        output_file = output_file.replace(".html", "_plot_data.json")
+        with open(output_file, "w") as outfile:
             json.dump(figure_dict, outfile, indent=4)
 
     def save_figure(self, output_file, save_json=True):
@@ -352,10 +380,10 @@
             figure.generate_figure()
             if save_json:
                 json_file_path = output_file_path.replace(
-                    '.html', '{}-plot_data.json'.format(idx))
+                    ".html", "{}-plot_data.json".format(idx)
+                )
                 figure._save_figure_json(json_file_path)
         plot_array = [figure.plot for figure in figure_array]
-        all_plots = bokeh.layouts.column(children=plot_array,
-                                         sizing_mode='scale_width')
+        all_plots = bokeh.layouts.column(children=plot_array, sizing_mode="scale_width")
         bokeh.plotting.output_file(output_file_path)
         bokeh.plotting.save(all_plots)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
index 7544f8d..2c0dc4c 100644
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
+++ b/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
@@ -27,91 +27,230 @@
 VERY_SHORT_SLEEP = 0.5
 SHORT_SLEEP = 1
 MED_SLEEP = 6
-DISCONNECTION_MESSAGE_BRCM = 'driver adapter not found'
-RSSI_ERROR_VAL = float('nan')
+DISCONNECTION_MESSAGE_BRCM = "driver adapter not found"
+RSSI_ERROR_VAL = float("nan")
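+# Rate lookup: RATE_TABLE[mode][num_streams][bandwidth_mhz] lists PHY rates in Mbps
+# indexed by MCS.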
 RATE_TABLE = {
-    'HT': {
+    "HT": {
         1: {
             20: [7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2],
-            40: [15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0]
+            40: [15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0],
         },
         2: {
             20: [
-                0, 0, 0, 0, 0, 0, 0, 0, 14.4, 28.8, 43.4, 57.8, 86.8, 115.6,
-                130, 144.4
+                0,
+                0,
+                0,
+                0,
+                0,
+                0,
+                0,
+                0,
+                14.4,
+                28.8,
+                43.4,
+                57.8,
+                86.8,
+                115.6,
+                130,
+                144.4,
             ],
-            40: [0, 0, 0, 0, 0, 0, 0, 0, 30, 60, 90, 120, 180, 240, 270, 300]
-        }
+            40: [0, 0, 0, 0, 0, 0, 0, 0, 30, 60, 90, 120, 180, 240, 270, 300],
+        },
     },
-    'VHT': {
+    "VHT": {
         1: {
             20: [
-                7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2, 86.7, 96.2,
-                129.0, 143.4
+                7.2,
+                14.4,
+                21.7,
+                28.9,
+                43.4,
+                57.8,
+                65.0,
+                72.2,
+                86.7,
+                96.2,
+                129.0,
+                143.4,
             ],
             40: [
-                15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0, 180.0,
-                200.0, 258, 286.8
+                15.0,
+                30.0,
+                45.0,
+                60.0,
+                90.0,
+                120.0,
+                135.0,
+                150.0,
+                180.0,
+                200.0,
+                258,
+                286.8,
             ],
             80: [
-                32.5, 65.0, 97.5, 130.0, 195.0, 260.0, 292.5, 325.0, 390.0,
-                433.3, 540.4, 600.4
+                32.5,
+                65.0,
+                97.5,
+                130.0,
+                195.0,
+                260.0,
+                292.5,
+                325.0,
+                390.0,
+                433.3,
+                540.4,
+                600.4,
             ],
             160: [
-                65.0, 130.0, 195.0, 260.0, 390.0, 520.0, 585.0, 650.0, 780.0,
-                1080.8, 1200.8
-            ]
+                65.0,
+                130.0,
+                195.0,
+                260.0,
+                390.0,
+                520.0,
+                585.0,
+                650.0,
+                780.0,
+                1080.8,
+                1200.8,
+            ],
         },
         2: {
             20: [
-                14.4, 28.8, 43.4, 57.8, 86.8, 115.6, 130, 144.4, 173.4, 192.4,
-                258, 286.8
+                14.4,
+                28.8,
+                43.4,
+                57.8,
+                86.8,
+                115.6,
+                130,
+                144.4,
+                173.4,
+                192.4,
+                258,
+                286.8,
             ],
             40: [30, 60, 90, 120, 180, 240, 270, 300, 360, 400, 516, 573.6],
-            80: [
-                65, 130, 195, 260, 390, 520, 585, 650, 780, 866.6, 1080.8,
-                1200.8
-            ],
-            160:
-            [130, 260, 390, 520, 780, 1040, 1170, 1300, 1560, 2161.6, 2401.6]
+            80: [65, 130, 195, 260, 390, 520, 585, 650, 780, 866.6, 1080.8, 1200.8],
+            160: [130, 260, 390, 520, 780, 1040, 1170, 1300, 1560, 2161.6, 2401.6],
         },
     },
-    'HE': {
+    "HE": {
         1: {
             20: [
-                8.6, 17.2, 25.8, 34.4, 51.6, 68.8, 77.4, 86.0, 103.2, 114.7,
-                129.0, 143.4
+                8.6,
+                17.2,
+                25.8,
+                34.4,
+                51.6,
+                68.8,
+                77.4,
+                86.0,
+                103.2,
+                114.7,
+                129.0,
+                143.4,
             ],
             40: [
-                17.2, 34.4, 51.6, 68.8, 103.2, 137.6, 154.8, 172, 206.4, 229.4,
-                258, 286.8
+                17.2,
+                34.4,
+                51.6,
+                68.8,
+                103.2,
+                137.6,
+                154.8,
+                172,
+                206.4,
+                229.4,
+                258,
+                286.8,
             ],
             80: [
-                36.0, 72.1, 108.1, 144.1, 216.2, 288.2, 324.3, 360.3, 432.4,
-                480.4, 540.4, 600.4
+                36.0,
+                72.1,
+                108.1,
+                144.1,
+                216.2,
+                288.2,
+                324.3,
+                360.3,
+                432.4,
+                480.4,
+                540.4,
+                600.4,
             ],
             160: [
-                72, 144.2, 216.2, 288.2, 432.4, 576.4, 648.6, 720.6, 864.8,
-                960.8, 1080.8, 1200.8
-            ]
+                72,
+                144.2,
+                216.2,
+                288.2,
+                432.4,
+                576.4,
+                648.6,
+                720.6,
+                864.8,
+                960.8,
+                1080.8,
+                1200.8,
+            ],
         },
         2: {
             20: [
-                17.2, 34.4, 51.6, 68.8, 103.2, 137.6, 154.8, 172, 206.4, 229.4,
-                258, 286.8
+                17.2,
+                34.4,
+                51.6,
+                68.8,
+                103.2,
+                137.6,
+                154.8,
+                172,
+                206.4,
+                229.4,
+                258,
+                286.8,
             ],
             40: [
-                34.4, 68.8, 103.2, 137.6, 206.4, 275.2, 309.6, 344, 412.8,
-                458.8, 516, 573.6
+                34.4,
+                68.8,
+                103.2,
+                137.6,
+                206.4,
+                275.2,
+                309.6,
+                344,
+                412.8,
+                458.8,
+                516,
+                573.6,
             ],
             80: [
-                72, 144.2, 216.2, 288.2, 432.4, 576.4, 648.6, 720.6, 864.8,
-                960.8, 1080.8, 1200.8
+                72,
+                144.2,
+                216.2,
+                288.2,
+                432.4,
+                576.4,
+                648.6,
+                720.6,
+                864.8,
+                960.8,
+                1080.8,
+                1200.8,
             ],
             160: [
-                144, 288.4, 432.4, 576.4, 864.8, 1152.8, 1297.2, 1441.2,
-                1729.6, 1921.6, 2161.6, 2401.6
-            ]
+                144,
+                288.4,
+                432.4,
+                576.4,
+                864.8,
+                1152.8,
+                1297.2,
+                1441.2,
+                1729.6,
+                1921.6,
+                2161.6,
+                2401.6,
+            ],
         },
     },
 }
@@ -119,17 +258,18 @@
 
 # Rssi Utilities
 def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', None),
-                                    ('stdev', None)])
+    return collections.OrderedDict([("data", []), ("mean", None), ("stdev", None)])
 
 
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface='wlan0'):
+def get_connected_rssi(
+    dut,
+    num_measurements=1,
+    polling_frequency=SHORT_SLEEP,
+    first_measurement_delay=0,
+    disconnect_warning=True,
+    ignore_samples=0,
+    interface="wlan0",
+):
     # yapf: disable
     connected_rssi = collections.OrderedDict(
         [('time_stamp', []),
@@ -140,110 +280,108 @@
          ('chain_1_rssi', empty_rssi_result())])
 
     # yapf: enable
-    previous_bssid = 'disconnected'
+    previous_bssid = "disconnected"
     t0 = time.time()
     time.sleep(first_measurement_delay)
     for idx in range(num_measurements):
         measurement_start_time = time.time()
-        connected_rssi['time_stamp'].append(measurement_start_time - t0)
+        connected_rssi["time_stamp"].append(measurement_start_time - t0)
         # Get signal poll RSSI
         try:
-            status_output = dut.adb.shell(
-                'wpa_cli -i {} status'.format(interface))
+            status_output = dut.adb.shell("wpa_cli -i {} status".format(interface))
         except:
-            status_output = ''
-        match = re.search('bssid=.*', status_output)
+            status_output = ""
+        match = re.search("bssid=.*", status_output)
         if match:
-            current_bssid = match.group(0).split('=')[1]
-            connected_rssi['bssid'].append(current_bssid)
+            current_bssid = match.group(0).split("=")[1]
+            connected_rssi["bssid"].append(current_bssid)
         else:
-            current_bssid = 'disconnected'
-            connected_rssi['bssid'].append(current_bssid)
-            if disconnect_warning and previous_bssid != 'disconnected':
-                logging.warning('WIFI DISCONNECT DETECTED!')
+            current_bssid = "disconnected"
+            connected_rssi["bssid"].append(current_bssid)
+            if disconnect_warning and previous_bssid != "disconnected":
+                logging.warning("WIFI DISCONNECT DETECTED!")
 
         previous_bssid = current_bssid
-        match = re.search('\s+ssid=.*', status_output)
+        match = re.search(r"\s+ssid=.*", status_output)
         if match:
-            ssid = match.group(0).split('=')[1]
-            connected_rssi['ssid'].append(ssid)
+            ssid = match.group(0).split("=")[1]
+            connected_rssi["ssid"].append(ssid)
         else:
-            connected_rssi['ssid'].append('disconnected')
+            connected_rssi["ssid"].append("disconnected")
 
-        #TODO: SEARCH MAP ; PICK CENTER CHANNEL
-        match = re.search('\s+freq=.*', status_output)
+        # TODO: SEARCH MAP ; PICK CENTER CHANNEL
+        match = re.search(r"\s+freq=.*", status_output)
         if match:
-            frequency = int(match.group(0).split('=')[1])
-            connected_rssi['frequency'].append(frequency)
+            frequency = int(match.group(0).split("=")[1])
+            connected_rssi["frequency"].append(frequency)
         else:
-            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
+            connected_rssi["frequency"].append(RSSI_ERROR_VAL)
 
-        if interface == 'wlan0':
+        if interface == "wlan0":
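+            # The primary interface reads per-chain RSSI from the Broadcom driver;
+            # other interfaces fall back to wpa_cli signal_poll below.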
             try:
-                per_chain_rssi = dut.adb.shell('wl phy_rssi_ant')
+                per_chain_rssi = dut.adb.shell("wl phy_rssi_ant")
                 chain_0_rssi = re.search(
-                    r'rssi\[0\]\s(?P<chain_0_rssi>[0-9\-]*)', per_chain_rssi)
+                    r"rssi\[0\]\s(?P<chain_0_rssi>[0-9\-]*)", per_chain_rssi
+                )
                 if chain_0_rssi:
-                    chain_0_rssi = int(chain_0_rssi.group('chain_0_rssi'))
+                    chain_0_rssi = int(chain_0_rssi.group("chain_0_rssi"))
                 else:
-                    chain_0_rssi = -float('inf')
+                    chain_0_rssi = -float("inf")
                 chain_1_rssi = re.search(
-                    r'rssi\[1\]\s(?P<chain_1_rssi>[0-9\-]*)', per_chain_rssi)
+                    r"rssi\[1\]\s(?P<chain_1_rssi>[0-9\-]*)", per_chain_rssi
+                )
                 if chain_1_rssi:
-                    chain_1_rssi = int(chain_1_rssi.group('chain_1_rssi'))
+                    chain_1_rssi = int(chain_1_rssi.group("chain_1_rssi"))
                 else:
-                    chain_1_rssi = -float('inf')
+                    chain_1_rssi = -float("inf")
             except:
                 chain_0_rssi = RSSI_ERROR_VAL
                 chain_1_rssi = RSSI_ERROR_VAL
-            connected_rssi['chain_0_rssi']['data'].append(chain_0_rssi)
-            connected_rssi['chain_1_rssi']['data'].append(chain_1_rssi)
+            connected_rssi["chain_0_rssi"]["data"].append(chain_0_rssi)
+            connected_rssi["chain_1_rssi"]["data"].append(chain_1_rssi)
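+            # Combine the two chains in linear power and convert back to dB
+            # (e.g. two -60 dBm chains combine to roughly -57 dBm).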
             combined_rssi = math.pow(10, chain_0_rssi / 10) + math.pow(
-                10, chain_1_rssi / 10)
+                10, chain_1_rssi / 10
+            )
             combined_rssi = 10 * math.log10(combined_rssi)
-            connected_rssi['signal_poll_rssi']['data'].append(combined_rssi)
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                combined_rssi)
+            connected_rssi["signal_poll_rssi"]["data"].append(combined_rssi)
+            connected_rssi["signal_poll_avg_rssi"]["data"].append(combined_rssi)
         else:
             try:
                 signal_poll_output = dut.adb.shell(
-                    'wpa_cli -i {} signal_poll'.format(interface))
+                    "wpa_cli -i {} signal_poll".format(interface)
+                )
             except:
-                signal_poll_output = ''
-            match = re.search('RSSI=.*', signal_poll_output)
+                signal_poll_output = ""
+            match = re.search("RSSI=.*", signal_poll_output)
             if match:
-                temp_rssi = int(match.group(0).split('=')[1])
+                temp_rssi = int(match.group(0).split("=")[1])
                 if temp_rssi == -9999 or temp_rssi == 0:
-                    connected_rssi['signal_poll_rssi']['data'].append(
-                        RSSI_ERROR_VAL)
+                    connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
                 else:
-                    connected_rssi['signal_poll_rssi']['data'].append(
-                        temp_rssi)
+                    connected_rssi["signal_poll_rssi"]["data"].append(temp_rssi)
             else:
-                connected_rssi['signal_poll_rssi']['data'].append(
-                    RSSI_ERROR_VAL)
-            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
+                connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
+            connected_rssi["chain_0_rssi"]["data"].append(RSSI_ERROR_VAL)
+            connected_rssi["chain_1_rssi"]["data"].append(RSSI_ERROR_VAL)
         measurement_elapsed_time = time.time() - measurement_start_time
         time.sleep(max(0, polling_frequency - measurement_elapsed_time))
 
     # Statistics, Statistics
     for key, val in connected_rssi.copy().items():
-        if 'data' not in val:
+        if "data" not in val:
             continue
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
         if len(filtered_rssi_values) > ignore_samples:
             filtered_rssi_values = filtered_rssi_values[ignore_samples:]
         if filtered_rssi_values:
-            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            connected_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
             if len(filtered_rssi_values) > 1:
-                connected_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
+                connected_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
             else:
-                connected_rssi[key]['stdev'] = 0
+                connected_rssi[key]["stdev"] = 0
         else:
-            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
-            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
+            connected_rssi[key]["mean"] = RSSI_ERROR_VAL
+            connected_rssi[key]["stdev"] = RSSI_ERROR_VAL
 
     return connected_rssi
 
@@ -253,69 +391,67 @@
     for bssid in tracked_bssids:
         scan_rssi[bssid] = empty_rssi_result()
     for idx in range(num_measurements):
-        scan_output = dut.adb.shell('cmd wifi start-scan')
+        scan_output = dut.adb.shell("cmd wifi start-scan")
         time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell('cmd wifi list-scan-results')
+        scan_output = dut.adb.shell("cmd wifi list-scan-results")
         for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + '.*',
-                                     scan_output,
-                                     flags=re.IGNORECASE)
+            bssid_result = re.search(bssid + ".*", scan_output, flags=re.IGNORECASE)
             if bssid_result:
                 bssid_result = bssid_result.group(0).split()
-                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
+                scan_rssi[bssid]["data"].append(int(bssid_result[2]))
             else:
-                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
+                scan_rssi[bssid]["data"].append(RSSI_ERROR_VAL)
     # Compute mean RSSIs. Only average valid readings.
     # Output RSSI_ERROR_VAL if no readings found.
     for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
         if filtered_rssi_values:
-            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            scan_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
             if len(filtered_rssi_values) > 1:
-                scan_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
+                scan_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
             else:
-                scan_rssi[key]['stdev'] = 0
+                scan_rssi[key]["stdev"] = 0
         else:
-            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
-            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
+            scan_rssi[key]["mean"] = RSSI_ERROR_VAL
+            scan_rssi[key]["stdev"] = RSSI_ERROR_VAL
     return scan_rssi
 
 
 def get_sw_signature(dut):
-    bdf_output = dut.adb.shell('cksum /vendor/firmware/bcmdhd*')
-    logging.debug('BDF Checksum output: {}'.format(bdf_output))
-    bdf_signature = sum(
-        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
+    bdf_output = dut.adb.shell("cksum /vendor/firmware/bcmdhd*")
+    logging.debug("BDF Checksum output: {}".format(bdf_output))
+    bdf_signature = (
+        sum([int(line.split(" ")[0]) for line in bdf_output.splitlines()]) % 1000
+    )
 
-    fw_version = dut.adb.shell('getprop vendor.wlan.firmware.version')
-    driver_version = dut.adb.shell('getprop vendor.wlan.driver.version')
-    logging.debug('Firmware version : {}. Driver version: {}'.format(
-        fw_version, driver_version))
-    fw_signature = '{}+{}'.format(fw_version, driver_version)
-    fw_signature = int(hashlib.md5(fw_signature.encode()).hexdigest(),
-                       16) % 1000
+    fw_version = dut.adb.shell("getprop vendor.wlan.firmware.version")
+    driver_version = dut.adb.shell("getprop vendor.wlan.driver.version")
+    logging.debug(
+        "Firmware version: {}. Driver version: {}".format(fw_version, driver_version)
+    )
+    fw_signature = "{}+{}".format(fw_version, driver_version)
+    fw_signature = int(hashlib.md5(fw_signature.encode()).hexdigest(), 16) % 1000
     serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
     return {
-        'config_signature': bdf_signature,
-        'fw_signature': fw_signature,
-        'serial_hash': serial_hash
+        "config_signature": bdf_signature,
+        "fw_signature": fw_signature,
+        "serial_hash": serial_hash,
     }
 
 
 def get_country_code(dut):
     try:
-        country_code = dut.adb.shell('wl country').split(' ')[0]
+        country_code = dut.adb.shell("wl country").split(" ")[0]
     except:
-        country_code = 'XZ'
-    if country_code == 'XZ':
-        country_code = 'WW'
-    logging.debug('Country code: {}'.format(country_code))
+        country_code = "XZ"
+    if country_code == "XZ":
+        country_code = "WW"
+    logging.debug("Country code: {}".format(country_code))
     return country_code
 
 
 def push_config(dut, config_file):
-    config_files_list = dut.adb.shell('ls /vendor/etc/*.cal').splitlines()
+    config_files_list = dut.adb.shell("ls /vendor/etc/*.cal").splitlines()
     for dst_file in config_files_list:
         dut.push_system_file(config_file, dst_file)
     dut.reboot()
@@ -338,67 +474,73 @@
         datamsc_file: path to Data.msc file
     """
     for file in firmware_files:
-        dut.push_system_file(file, '/vendor/firmware/')
+        dut.push_system_file(file, "/vendor/firmware/")
     dut.reboot()
 
 
 def disable_beamforming(dut):
-    dut.adb.shell('wl txbf 0')
+    dut.adb.shell("wl txbf 0")
 
 
 def set_nss_capability(dut, nss):
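+    # Limit spatial streams via HE operating-mode indication (assumption: -r sets
+    # the receive NSS and -t the transmit NSS).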
-    dut.adb.shell('wl he omi -r {} -t {}'.format(nss, nss))
+    dut.adb.shell("wl he omi -r {} -t {}".format(nss, nss))
 
 
 def set_chain_mask(dut, chain):
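+    # wl txchain/rxchain take a chain bitmap (0x1 = chain 0, 0x2 = chain 1,
+    # 0x3 = both), hence the index-to-mask mapping below.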
-    if chain == '2x2':
+    if chain == "2x2":
         chain = 3
     else:
         chain = chain + 1
     # Get current chain mask
     try:
-        curr_tx_chain = int(dut.adb.shell('wl txchain'))
-        curr_rx_chain = int(dut.adb.shell('wl rxchain'))
+        curr_tx_chain = int(dut.adb.shell("wl txchain"))
+        curr_rx_chain = int(dut.adb.shell("wl rxchain"))
     except:
         curr_tx_chain = -1
         curr_rx_chain = -1
     if curr_tx_chain == chain and curr_rx_chain == chain:
         return
     # Set chain mask if needed
-    dut.adb.shell('wl down')
+    dut.adb.shell("wl down")
     time.sleep(VERY_SHORT_SLEEP)
-    dut.adb.shell('wl txchain 0x{}'.format(chain))
-    dut.adb.shell('wl rxchain 0x{}'.format(chain))
-    dut.adb.shell('wl up')
+    dut.adb.shell("wl txchain 0x{}".format(chain))
+    dut.adb.shell("wl rxchain 0x{}".format(chain))
+    dut.adb.shell("wl up")
 
 
-class LinkLayerStats():
-
-    LLSTATS_CMD = 'wl dump ampdu; wl counters;'
-    LL_STATS_CLEAR_CMD = 'wl dump_clear ampdu; wl reset_cnts;'
-    BW_REGEX = re.compile(r'Chanspec:.+ (?P<bandwidth>[0-9]+)MHz')
-    MCS_REGEX = re.compile(r'(?P<count>[0-9]+)\((?P<percent>[0-9]+)%\)')
-    RX_REGEX = re.compile(r'RX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
-                          '\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
-    TX_REGEX = re.compile(r'TX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
-                          '\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
-    TX_PER_REGEX = re.compile(
-        r'(?P<mode>\S+) PER\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
-        '\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
-    RX_FCS_REGEX = re.compile(
-        r'rxbadfcs (?P<rx_bad_fcs>[0-9]*).+\n.+goodfcs (?P<rx_good_fcs>[0-9]*)'
+class LinkLayerStats:
+    LLSTATS_CMD = "wl dump ampdu; wl counters;"
+    LL_STATS_CLEAR_CMD = "wl dump_clear ampdu; wl reset_cnts;"
+    BW_REGEX = re.compile(r"Chanspec:.+ (?P<bandwidth>[0-9]+)MHz")
+    MCS_REGEX = re.compile(r"(?P<count>[0-9]+)\((?P<percent>[0-9]+)%\)")
+    RX_REGEX = re.compile(
+        r"RX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)"
+        r"\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)"
     )
-    RX_AGG_REGEX = re.compile(r'rxmpduperampdu (?P<aggregation>[0-9]*)')
-    TX_AGG_REGEX = re.compile(r' mpduperampdu (?P<aggregation>[0-9]*)')
+    TX_REGEX = re.compile(
+        r"TX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)"
+        r"\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)"
+    )
+    TX_PER_REGEX = re.compile(
+        r"(?P<mode>\S+) PER\s+:\s*(?P<nss1>[0-9, ,(,),%]*)"
+        r"\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)"
+    )
+    RX_FCS_REGEX = re.compile(
+        r"rxbadfcs (?P<rx_bad_fcs>[0-9]*).+\n.+goodfcs (?P<rx_good_fcs>[0-9]*)"
+    )
+    RX_AGG_REGEX = re.compile(r"rxmpduperampdu (?P<aggregation>[0-9]*)")
+    TX_AGG_REGEX = re.compile(r" mpduperampdu (?P<aggregation>[0-9]*)")
     TX_AGG_STOP_REGEX = re.compile(
-        r'agg stop reason: tot_agg_tried (?P<agg_tried>[0-9]+) agg_txcancel (?P<agg_canceled>[0-9]+) (?P<agg_stop_reason>.+)'
+        r"agg stop reason: tot_agg_tried (?P<agg_tried>[0-9]+) agg_txcancel (?P<agg_canceled>[0-9]+) (?P<agg_stop_reason>.+)"
     )
     TX_AGG_STOP_REASON_REGEX = re.compile(
-        r'(?P<reason>\w+) [0-9]+ \((?P<value>[0-9]+%)\)')
+        r"(?P<reason>\w+) [0-9]+ \((?P<value>[0-9]+%)\)"
+    )
     MCS_ID = collections.namedtuple(
-        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'gi'])
-    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
-    BW_MAP = {'0': 20, '1': 40, '2': 80}
+        "mcs_id", ["mode", "num_streams", "bandwidth", "mcs", "gi"]
+    )
+    MODE_MAP = {"0": "11a/g", "1": "11b", "2": "11n", "3": "11ac"}
+    BW_MAP = {"0": 20, "1": 40, "2": 80}
 
     def __init__(self, dut, llstats_enabled=True):
         self.dut = dut
@@ -409,17 +551,17 @@
     def update_stats(self):
         if self.llstats_enabled:
             try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
-                                                    timeout=1)
+                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD, timeout=1)
                 self.dut.adb.shell_nb(self.LL_STATS_CLEAR_CMD)
 
                 wl_join = self.dut.adb.shell("wl status")
                 self.bandwidth = int(
-                    re.search(self.BW_REGEX, wl_join).group('bandwidth'))
+                    re.search(self.BW_REGEX, wl_join).group("bandwidth")
+                )
             except:
-                llstats_output = ''
+                llstats_output = ""
         else:
-            llstats_output = ''
+            llstats_output = ""
         self._update_stats(llstats_output)
 
     def reset_stats(self):
@@ -427,22 +569,21 @@
         self.llstats_incremental = self._empty_llstats()
 
     def _empty_llstats(self):
-        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
-                                       mpdu_stats=collections.OrderedDict(),
-                                       summary=collections.OrderedDict())
+        return collections.OrderedDict(
+            mcs_stats=collections.OrderedDict(),
+            mpdu_stats=collections.OrderedDict(),
+            summary=collections.OrderedDict(),
+        )
 
     def _empty_mcs_stat(self):
-        return collections.OrderedDict(txmpdu=0,
-                                       rxmpdu=0,
-                                       mpdu_lost=0,
-                                       retries=0,
-                                       retries_short=0,
-                                       retries_long=0)
+        return collections.OrderedDict(
+            txmpdu=0, rxmpdu=0, mpdu_lost=0, retries=0, retries_short=0, retries_long=0
+        )
 
     def _mcs_id_to_string(self, mcs_id):
-        mcs_string = '{} Nss{} MCS{} GI{}'.format(mcs_id.mode,
-                                                  mcs_id.num_streams,
-                                                  mcs_id.mcs, mcs_id.gi)
+        mcs_string = "{} Nss{} MCS{} GI{}".format(
+            mcs_id.mode, mcs_id.num_streams, mcs_id.mcs, mcs_id.gi
+        )
         return mcs_string
 
     def _parse_mcs_stats(self, llstats_output):
@@ -456,38 +597,37 @@
         rx_match_iter = re.finditer(self.RX_REGEX, llstats_output)
         tx_match_iter = re.finditer(self.TX_REGEX, llstats_output)
         tx_per_match_iter = re.finditer(self.TX_PER_REGEX, llstats_output)
-        for rx_match, tx_match, tx_per_match in zip(rx_match_iter,
-                                                    tx_match_iter,
-                                                    tx_per_match_iter):
-            mode = rx_match.group('mode')
-            mode = 'HT' if mode == 'MCS' else mode
+        for rx_match, tx_match, tx_per_match in zip(
+            rx_match_iter, tx_match_iter, tx_per_match_iter
+        ):
+            mode = rx_match.group("mode")
+            mode = "HT" if mode == "MCS" else mode
             for nss in [1, 2]:
-                rx_mcs_iter = re.finditer(self.MCS_REGEX,
-                                          rx_match.group(nss + 1))
-                tx_mcs_iter = re.finditer(self.MCS_REGEX,
-                                          tx_match.group(nss + 1))
-                tx_per_iter = re.finditer(self.MCS_REGEX,
-                                          tx_per_match.group(nss + 1))
-                for mcs, (rx_mcs_stats, tx_mcs_stats,
-                          tx_per_mcs_stats) in enumerate(
-                              itertools.zip_longest(rx_mcs_iter, tx_mcs_iter,
-                                                    tx_per_iter)):
+                rx_mcs_iter = re.finditer(self.MCS_REGEX, rx_match.group(nss + 1))
+                tx_mcs_iter = re.finditer(self.MCS_REGEX, tx_match.group(nss + 1))
+                tx_per_iter = re.finditer(self.MCS_REGEX, tx_per_match.group(nss + 1))
+                for mcs, (rx_mcs_stats, tx_mcs_stats, tx_per_mcs_stats) in enumerate(
+                    itertools.zip_longest(rx_mcs_iter, tx_mcs_iter, tx_per_iter)
+                ):
                     current_mcs = self.MCS_ID(
-                        mode, nss, self.bandwidth,
-                        mcs + int(8 * (mode == 'HT') * (nss - 1)), 0)
+                        mode,
+                        nss,
+                        self.bandwidth,
+                        mcs + int(8 * (mode == "HT") * (nss - 1)),
+                        0,
+                    )
                     current_stats = collections.OrderedDict(
-                        txmpdu=int(tx_mcs_stats.group('count'))
-                        if tx_mcs_stats else 0,
-                        rxmpdu=int(rx_mcs_stats.group('count'))
-                        if rx_mcs_stats else 0,
+                        txmpdu=int(tx_mcs_stats.group("count")) if tx_mcs_stats else 0,
+                        rxmpdu=int(rx_mcs_stats.group("count")) if rx_mcs_stats else 0,
                         mpdu_lost=0,
-                        retries=tx_per_mcs_stats.group('count')
-                        if tx_per_mcs_stats else 0,
+                        retries=tx_per_mcs_stats.group("count")
+                        if tx_per_mcs_stats
+                        else 0,
                         retries_short=0,
                         retries_long=0,
-                        mcs_id=current_mcs)
-                    llstats_dict[self._mcs_id_to_string(
-                        current_mcs)] = current_stats
+                        mcs_id=current_mcs,
+                    )
+                    llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
         return llstats_dict
 
     def _parse_mpdu_stats(self, llstats_output):
@@ -498,81 +638,97 @@
 
         if rx_agg_match and tx_agg_match and tx_agg_stop_match and rx_fcs_match:
             agg_stop_dict = collections.OrderedDict(
-                rx_aggregation=int(rx_agg_match.group('aggregation')),
-                tx_aggregation=int(tx_agg_match.group('aggregation')),
-                tx_agg_tried=int(tx_agg_stop_match.group('agg_tried')),
-                tx_agg_canceled=int(tx_agg_stop_match.group('agg_canceled')),
-                rx_good_fcs=int(rx_fcs_match.group('rx_good_fcs')),
-                rx_bad_fcs=int(rx_fcs_match.group('rx_bad_fcs')),
-                agg_stop_reason=collections.OrderedDict())
+                rx_aggregation=int(rx_agg_match.group("aggregation")),
+                tx_aggregation=int(tx_agg_match.group("aggregation")),
+                tx_agg_tried=int(tx_agg_stop_match.group("agg_tried")),
+                tx_agg_canceled=int(tx_agg_stop_match.group("agg_canceled")),
+                rx_good_fcs=int(rx_fcs_match.group("rx_good_fcs")),
+                rx_bad_fcs=int(rx_fcs_match.group("rx_bad_fcs")),
+                agg_stop_reason=collections.OrderedDict(),
+            )
             agg_reason_match = re.finditer(
                 self.TX_AGG_STOP_REASON_REGEX,
-                tx_agg_stop_match.group('agg_stop_reason'))
+                tx_agg_stop_match.group("agg_stop_reason"),
+            )
             for reason_match in agg_reason_match:
-                agg_stop_dict['agg_stop_reason'][reason_match.group(
-                    'reason')] = reason_match.group('value')
+                agg_stop_dict["agg_stop_reason"][
+                    reason_match.group("reason")
+                ] = reason_match.group("value")
 
         else:
-            agg_stop_dict = collections.OrderedDict(rx_aggregation=0,
-                                                    tx_aggregation=0,
-                                                    tx_agg_tried=0,
-                                                    tx_agg_canceled=0,
-                                                    rx_good_fcs=0,
-                                                    rx_bad_fcs=0,
-                                                    agg_stop_reason=None)
+            agg_stop_dict = collections.OrderedDict(
+                rx_aggregation=0,
+                tx_aggregation=0,
+                tx_agg_tried=0,
+                tx_agg_canceled=0,
+                rx_good_fcs=0,
+                rx_bad_fcs=0,
+                agg_stop_reason=None,
+            )
         return agg_stop_dict
 
     def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
-                                                  common_tx_mcs_count=0,
-                                                  common_tx_mcs_freq=0,
-                                                  common_rx_mcs=None,
-                                                  common_rx_mcs_count=0,
-                                                  common_rx_mcs_freq=0,
-                                                  rx_per=float('nan'))
+        llstats_summary = collections.OrderedDict(
+            common_tx_mcs=None,
+            common_tx_mcs_count=0,
+            common_tx_mcs_freq=0,
+            common_rx_mcs=None,
+            common_rx_mcs_count=0,
+            common_rx_mcs_freq=0,
+            rx_per=float("nan"),
+        )
         mcs_ids = []
         tx_mpdu = []
         rx_mpdu = []
         phy_rates = []
-        for mcs_str, mcs_stats in llstats_dict['mcs_stats'].items():
-            mcs_id = mcs_stats['mcs_id']
+        for mcs_str, mcs_stats in llstats_dict["mcs_stats"].items():
+            mcs_id = mcs_stats["mcs_id"]
             mcs_ids.append(mcs_str)
-            tx_mpdu.append(mcs_stats['txmpdu'])
-            rx_mpdu.append(mcs_stats['rxmpdu'])
-            phy_rates.append(RATE_TABLE[mcs_id.mode][mcs_id.num_streams][
-                mcs_id.bandwidth][mcs_id.mcs])
+            tx_mpdu.append(mcs_stats["txmpdu"])
+            rx_mpdu.append(mcs_stats["rxmpdu"])
+            phy_rates.append(
+                RATE_TABLE[mcs_id.mode][mcs_id.num_streams][mcs_id.bandwidth][
+                    mcs_id.mcs
+                ]
+            )
         if len(tx_mpdu) == 0 or len(rx_mpdu) == 0:
             return llstats_summary
-        llstats_summary['common_tx_mcs'] = mcs_ids[numpy.argmax(tx_mpdu)]
-        llstats_summary['common_tx_mcs_count'] = numpy.max(tx_mpdu)
-        llstats_summary['common_rx_mcs'] = mcs_ids[numpy.argmax(rx_mpdu)]
-        llstats_summary['common_rx_mcs_count'] = numpy.max(rx_mpdu)
+        llstats_summary["common_tx_mcs"] = mcs_ids[numpy.argmax(tx_mpdu)]
+        llstats_summary["common_tx_mcs_count"] = numpy.max(tx_mpdu)
+        llstats_summary["common_rx_mcs"] = mcs_ids[numpy.argmax(rx_mpdu)]
+        llstats_summary["common_rx_mcs_count"] = numpy.max(rx_mpdu)
         if sum(tx_mpdu) and sum(rx_mpdu):
-            llstats_summary['mean_tx_phy_rate'] = numpy.average(
-                phy_rates, weights=tx_mpdu)
-            llstats_summary['mean_rx_phy_rate'] = numpy.average(
-                phy_rates, weights=rx_mpdu)
-            llstats_summary['common_tx_mcs_freq'] = (
-                llstats_summary['common_tx_mcs_count'] / sum(tx_mpdu))
-            llstats_summary['common_rx_mcs_freq'] = (
-                llstats_summary['common_rx_mcs_count'] / sum(rx_mpdu))
-            total_rx_frames = llstats_dict['mpdu_stats'][
-                'rx_good_fcs'] + llstats_dict['mpdu_stats']['rx_bad_fcs']
+            llstats_summary["mean_tx_phy_rate"] = numpy.average(
+                phy_rates, weights=tx_mpdu
+            )
+            llstats_summary["mean_rx_phy_rate"] = numpy.average(
+                phy_rates, weights=rx_mpdu
+            )
+            llstats_summary["common_tx_mcs_freq"] = llstats_summary[
+                "common_tx_mcs_count"
+            ] / sum(tx_mpdu)
+            llstats_summary["common_rx_mcs_freq"] = llstats_summary[
+                "common_rx_mcs_count"
+            ] / sum(rx_mpdu)
+            total_rx_frames = (
+                llstats_dict["mpdu_stats"]["rx_good_fcs"]
+                + llstats_dict["mpdu_stats"]["rx_bad_fcs"]
+            )
             if total_rx_frames:
-                llstats_summary['rx_per'] = (
-                    llstats_dict['mpdu_stats']['rx_bad_fcs'] /
-                    (total_rx_frames)) * 100
+                llstats_summary["rx_per"] = (
+                    llstats_dict["mpdu_stats"]["rx_bad_fcs"] / (total_rx_frames)
+                ) * 100
         return llstats_summary
 
     def _update_stats(self, llstats_output):
         self.llstats_cumulative = self._empty_llstats()
         self.llstats_incremental = self._empty_llstats()
-        self.llstats_incremental['raw_output'] = llstats_output
-        self.llstats_incremental['mcs_stats'] = self._parse_mcs_stats(
-            llstats_output)
-        self.llstats_incremental['mpdu_stats'] = self._parse_mpdu_stats(
-            llstats_output)
-        self.llstats_incremental['summary'] = self._generate_stats_summary(
-            self.llstats_incremental)
-        self.llstats_cumulative['summary'] = self._generate_stats_summary(
-            self.llstats_cumulative)
+        self.llstats_incremental["raw_output"] = llstats_output
+        self.llstats_incremental["mcs_stats"] = self._parse_mcs_stats(llstats_output)
+        self.llstats_incremental["mpdu_stats"] = self._parse_mpdu_stats(llstats_output)
+        self.llstats_incremental["summary"] = self._generate_stats_summary(
+            self.llstats_incremental
+        )
+        self.llstats_cumulative["summary"] = self._generate_stats_summary(
+            self.llstats_cumulative
+        )
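
Before the next file, a simplified, standalone sketch of part of the summary aggregation that `_generate_stats_summary` performs in the Broadcom `LinkLayerStats` above: picking the most common TX/RX MCS by MPDU count and deriving an RX PER from the FCS counters. The MCS labels and counts below are hypothetical, and plain Python stands in for the numpy calls used in the real method.

# Hypothetical parsed stats, shaped like the dicts built by _parse_mcs_stats
# and _parse_mpdu_stats above.
mcs_stats = {
    "VHT Nss2 MCS7 GI0": {"txmpdu": 1200, "rxmpdu": 900},
    "VHT Nss2 MCS8 GI0": {"txmpdu": 300, "rxmpdu": 1500},
}
mpdu_stats = {"rx_good_fcs": 2300, "rx_bad_fcs": 100}

# Most common MCS by transmitted/received MPDU count.
common_tx_mcs = max(mcs_stats, key=lambda k: mcs_stats[k]["txmpdu"])
common_rx_mcs = max(mcs_stats, key=lambda k: mcs_stats[k]["rxmpdu"])

# RX PER from good/bad FCS counters, as a percentage.
total_rx_frames = mpdu_stats["rx_good_fcs"] + mpdu_stats["rx_bad_fcs"]
rx_per = (
    100 * mpdu_stats["rx_bad_fcs"] / total_rx_frames
    if total_rx_frames
    else float("nan")
)

print(common_tx_mcs, common_rx_mcs, round(rx_per, 2))
# VHT Nss2 MCS7 GI0 VHT Nss2 MCS8 GI0 4.17
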
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
index 3405260..8d0dff5 100644
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
+++ b/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
@@ -16,8 +16,8 @@
 
 import re
 
-RTT_REGEX = re.compile(r'^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)')
-LOSS_REGEX = re.compile(r'(?P<loss>\S+)% packet loss')
+RTT_REGEX = re.compile(r"^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)")
+LOSS_REGEX = re.compile(r"(?P<loss>\S+)% packet loss")
 
 
 class PingResult(object):
@@ -35,47 +35,50 @@
         ping_interarrivals: A list-like object enumerating the amount of time
             between the beginning of each subsequent transmission.
     """
+
     def __init__(self, ping_output):
         self.packet_loss_percentage = 100
         self.transmission_times = []
 
         self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt)
-        self.timestamps = _ListWrap(self.transmission_times,
-                                    lambda entry: entry.timestamp)
+        self.timestamps = _ListWrap(
+            self.transmission_times, lambda entry: entry.timestamp
+        )
         self.ping_interarrivals = _PingInterarrivals(self.transmission_times)
 
         self.start_time = 0
         for line in ping_output:
-            if 'loss' in line:
+            if "loss" in line:
                 match = re.search(LOSS_REGEX, line)
-                self.packet_loss_percentage = float(match.group('loss'))
-            if 'time=' in line:
+                self.packet_loss_percentage = float(match.group("loss"))
+            if "time=" in line:
                 match = re.search(RTT_REGEX, line)
                 if self.start_time == 0:
-                    self.start_time = float(match.group('timestamp'))
+                    self.start_time = float(match.group("timestamp"))
                 self.transmission_times.append(
                     PingTransmissionTimes(
-                        float(match.group('timestamp')) - self.start_time,
-                        float(match.group('rtt'))))
-        self.connected = len(
-            ping_output) > 1 and self.packet_loss_percentage < 100
+                        float(match.group("timestamp")) - self.start_time,
+                        float(match.group("rtt")),
+                    )
+                )
+        self.connected = len(ping_output) > 1 and self.packet_loss_percentage < 100
 
     def __getitem__(self, item):
-        if item == 'rtt':
+        if item == "rtt":
             return self.rtts
-        if item == 'connected':
+        if item == "connected":
             return self.connected
-        if item == 'packet_loss_percentage':
+        if item == "packet_loss_percentage":
             return self.packet_loss_percentage
-        raise ValueError('Invalid key. Please use an attribute instead.')
+        raise ValueError("Invalid key. Please use an attribute instead.")
 
     def as_dict(self):
         return {
-            'connected': 1 if self.connected else 0,
-            'rtt': list(self.rtts),
-            'time_stamp': list(self.timestamps),
-            'ping_interarrivals': list(self.ping_interarrivals),
-            'packet_loss_percentage': self.packet_loss_percentage
+            "connected": 1 if self.connected else 0,
+            "rtt": list(self.rtts),
+            "time_stamp": list(self.timestamps),
+            "ping_interarrivals": list(self.ping_interarrivals),
+            "packet_loss_percentage": self.packet_loss_percentage,
         }
 
 
@@ -86,6 +89,7 @@
         rtt: The round trip time for the packet sent.
         timestamp: The timestamp the packet started its trip.
     """
+
     def __init__(self, timestamp, rtt):
         self.rtt = rtt
         self.timestamp = timestamp
@@ -93,6 +97,7 @@
 
 class _ListWrap(object):
     """A convenient helper class for treating list iterators as native lists."""
+
     def __init__(self, wrapped_list, func):
         self.__wrapped_list = wrapped_list
         self.__func = func
@@ -110,12 +115,14 @@
 
 class _PingInterarrivals(object):
     """A helper class for treating ping interarrivals as a native list."""
+
     def __init__(self, ping_entries):
         self.__ping_entries = ping_entries
 
     def __getitem__(self, key):
-        return (self.__ping_entries[key + 1].timestamp -
-                self.__ping_entries[key].timestamp)
+        return (
+            self.__ping_entries[key + 1].timestamp - self.__ping_entries[key].timestamp
+        )
 
     def __iter__(self):
         for index in range(len(self.__ping_entries) - 1):
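
As a quick usage sketch of the PingResult class above: the import path follows the file just shown, but the ping output lines, host, and timestamps are made up for illustration.

from antlion.test_utils.wifi.wifi_performance_test_utils.ping_utils import PingResult

# Hypothetical `ping -D`-style output: timestamped replies plus a loss summary.
sample_output = [
    "[1661811234.100] 64 bytes from 8.8.8.8: icmp_seq=1 ttl=117 time=12.3 ms",
    "[1661811235.101] 64 bytes from 8.8.8.8: icmp_seq=2 ttl=117 time=11.9 ms",
    "2 packets transmitted, 2 received, 0% packet loss, time 1001ms",
]

result = PingResult(sample_output)
print(result.connected)                  # True
print(list(result.rtts))                 # [12.3, 11.9]
print(list(result.ping_interarrivals))   # [~1.001]
print(result["packet_loss_percentage"])  # 0.0
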
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
index 1383f6a..2f50cf1 100644
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
+++ b/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
@@ -22,32 +22,34 @@
 import re
 import statistics
 import time
-from antlion import asserts
+
+from mobly import asserts
 
 SHORT_SLEEP = 1
 MED_SLEEP = 6
-STATION_DUMP = 'iw {} station dump'
-SCAN = 'wpa_cli scan'
-SCAN_RESULTS = 'wpa_cli scan_results'
-SIGNAL_POLL = 'wpa_cli signal_poll'
-WPA_CLI_STATUS = 'wpa_cli status'
-RSSI_ERROR_VAL = float('nan')
-FW_REGEX = re.compile(r'FW:(?P<firmware>\S+) HW:')
+STATION_DUMP = "iw {} station dump"
+SCAN = "wpa_cli scan"
+SCAN_RESULTS = "wpa_cli scan_results"
+SIGNAL_POLL = "wpa_cli signal_poll"
+WPA_CLI_STATUS = "wpa_cli status"
+RSSI_ERROR_VAL = float("nan")
+FW_REGEX = re.compile(r"FW:(?P<firmware>\S+) HW:")
 
 
 # Rssi Utilities
 def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', None),
-                                    ('stdev', None)])
+    return collections.OrderedDict([("data", []), ("mean", None), ("stdev", None)])
 
 
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface='wlan0'):
+def get_connected_rssi(
+    dut,
+    num_measurements=1,
+    polling_frequency=SHORT_SLEEP,
+    first_measurement_delay=0,
+    disconnect_warning=True,
+    ignore_samples=0,
+    interface="wlan0",
+):
     # yapf: disable
     connected_rssi = collections.OrderedDict(
         [('time_stamp', []),
@@ -57,101 +59,98 @@
          ('chain_0_rssi', empty_rssi_result()),
          ('chain_1_rssi', empty_rssi_result())])
     # yapf: enable
-    previous_bssid = 'disconnected'
+    previous_bssid = "disconnected"
     t0 = time.time()
     time.sleep(first_measurement_delay)
     for idx in range(num_measurements):
         measurement_start_time = time.time()
-        connected_rssi['time_stamp'].append(measurement_start_time - t0)
+        connected_rssi["time_stamp"].append(measurement_start_time - t0)
         # Get signal poll RSSI
         try:
-            status_output = dut.adb.shell(
-                'wpa_cli -i {} status'.format(interface))
+            status_output = dut.adb.shell("wpa_cli -i {} status".format(interface))
         except:
-            status_output = ''
-        match = re.search('bssid=.*', status_output)
+            status_output = ""
+        match = re.search("bssid=.*", status_output)
         if match:
-            current_bssid = match.group(0).split('=')[1]
-            connected_rssi['bssid'].append(current_bssid)
+            current_bssid = match.group(0).split("=")[1]
+            connected_rssi["bssid"].append(current_bssid)
         else:
-            current_bssid = 'disconnected'
-            connected_rssi['bssid'].append(current_bssid)
-            if disconnect_warning and previous_bssid != 'disconnected':
-                logging.warning('WIFI DISCONNECT DETECTED!')
+            current_bssid = "disconnected"
+            connected_rssi["bssid"].append(current_bssid)
+            if disconnect_warning and previous_bssid != "disconnected":
+                logging.warning("WIFI DISCONNECT DETECTED!")
         previous_bssid = current_bssid
-        match = re.search('\s+ssid=.*', status_output)
+        match = re.search("\s+ssid=.*", status_output)
         if match:
-            ssid = match.group(0).split('=')[1]
-            connected_rssi['ssid'].append(ssid)
+            ssid = match.group(0).split("=")[1]
+            connected_rssi["ssid"].append(ssid)
         else:
-            connected_rssi['ssid'].append('disconnected')
+            connected_rssi["ssid"].append("disconnected")
         try:
             signal_poll_output = dut.adb.shell(
-                'wpa_cli -i {} signal_poll'.format(interface))
+                "wpa_cli -i {} signal_poll".format(interface)
+            )
         except:
-            signal_poll_output = ''
-        match = re.search('FREQUENCY=.*', signal_poll_output)
+            signal_poll_output = ""
+        match = re.search("FREQUENCY=.*", signal_poll_output)
         if match:
-            frequency = int(match.group(0).split('=')[1])
-            connected_rssi['frequency'].append(frequency)
+            frequency = int(match.group(0).split("=")[1])
+            connected_rssi["frequency"].append(frequency)
         else:
-            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
-        match = re.search('RSSI=.*', signal_poll_output)
+            connected_rssi["frequency"].append(RSSI_ERROR_VAL)
+        match = re.search("RSSI=.*", signal_poll_output)
         if match:
-            temp_rssi = int(match.group(0).split('=')[1])
+            temp_rssi = int(match.group(0).split("=")[1])
             if temp_rssi == -9999 or temp_rssi == 0:
-                connected_rssi['signal_poll_rssi']['data'].append(
-                    RSSI_ERROR_VAL)
+                connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
             else:
-                connected_rssi['signal_poll_rssi']['data'].append(temp_rssi)
+                connected_rssi["signal_poll_rssi"]["data"].append(temp_rssi)
         else:
-            connected_rssi['signal_poll_rssi']['data'].append(RSSI_ERROR_VAL)
-        match = re.search('AVG_RSSI=.*', signal_poll_output)
+            connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
+        match = re.search("AVG_RSSI=.*", signal_poll_output)
         if match:
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                int(match.group(0).split('=')[1]))
+            connected_rssi["signal_poll_avg_rssi"]["data"].append(
+                int(match.group(0).split("=")[1])
+            )
         else:
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                RSSI_ERROR_VAL)
+            connected_rssi["signal_poll_avg_rssi"]["data"].append(RSSI_ERROR_VAL)
 
         # Get per chain RSSI
         try:
             per_chain_rssi = dut.adb.shell(STATION_DUMP.format(interface))
         except:
-            per_chain_rssi = ''
-        match = re.search('.*signal avg:.*', per_chain_rssi)
+            per_chain_rssi = ""
+        match = re.search(".*signal avg:.*", per_chain_rssi)
         if match:
-            per_chain_rssi = per_chain_rssi[per_chain_rssi.find('[') +
-                                            1:per_chain_rssi.find(']')]
-            per_chain_rssi = per_chain_rssi.split(', ')
-            connected_rssi['chain_0_rssi']['data'].append(
-                int(per_chain_rssi[0]))
-            connected_rssi['chain_1_rssi']['data'].append(
-                int(per_chain_rssi[1]))
+            per_chain_rssi = per_chain_rssi[
+                per_chain_rssi.find("[") + 1 : per_chain_rssi.find("]")
+            ]
+            per_chain_rssi = per_chain_rssi.split(", ")
+            connected_rssi["chain_0_rssi"]["data"].append(int(per_chain_rssi[0]))
+            connected_rssi["chain_1_rssi"]["data"].append(int(per_chain_rssi[1]))
         else:
-            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
+            connected_rssi["chain_0_rssi"]["data"].append(RSSI_ERROR_VAL)
+            connected_rssi["chain_1_rssi"]["data"].append(RSSI_ERROR_VAL)
         measurement_elapsed_time = time.time() - measurement_start_time
         time.sleep(max(0, polling_frequency - measurement_elapsed_time))
 
     # Compute mean RSSIs. Only average valid readings.
     # Output RSSI_ERROR_VAL if no valid connected readings found.
     for key, val in connected_rssi.copy().items():
-        if 'data' not in val:
+        if "data" not in val:
             continue
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
         if len(filtered_rssi_values) > ignore_samples:
             filtered_rssi_values = filtered_rssi_values[ignore_samples:]
         if filtered_rssi_values:
-            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            connected_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
             if len(filtered_rssi_values) > 1:
-                connected_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
+                connected_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
             else:
-                connected_rssi[key]['stdev'] = 0
+                connected_rssi[key]["stdev"] = 0
         else:
-            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
-            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
+            connected_rssi[key]["mean"] = RSSI_ERROR_VAL
+            connected_rssi[key]["stdev"] = RSSI_ERROR_VAL
     return connected_rssi
 
 
@@ -164,61 +163,58 @@
         time.sleep(MED_SLEEP)
         scan_output = dut.adb.shell(SCAN_RESULTS)
         for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + '.*',
-                                     scan_output,
-                                     flags=re.IGNORECASE)
+            bssid_result = re.search(bssid + ".*", scan_output, flags=re.IGNORECASE)
             if bssid_result:
-                bssid_result = bssid_result.group(0).split('\t')
-                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
+                bssid_result = bssid_result.group(0).split("\t")
+                scan_rssi[bssid]["data"].append(int(bssid_result[2]))
             else:
-                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
+                scan_rssi[bssid]["data"].append(RSSI_ERROR_VAL)
     # Compute mean RSSIs. Only average valid readings.
     # Output RSSI_ERROR_VAL if no readings found.
     for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
         if filtered_rssi_values:
-            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            scan_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
             if len(filtered_rssi_values) > 1:
-                scan_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
+                scan_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
             else:
-                scan_rssi[key]['stdev'] = 0
+                scan_rssi[key]["stdev"] = 0
         else:
-            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
-            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
+            scan_rssi[key]["mean"] = RSSI_ERROR_VAL
+            scan_rssi[key]["stdev"] = RSSI_ERROR_VAL
     return scan_rssi
 
 
 def get_sw_signature(dut):
-    bdf_output = dut.adb.shell('cksum /vendor/firmware/bdwlan*')
-    logging.debug('BDF Checksum output: {}'.format(bdf_output))
-    bdf_signature = sum(
-        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
+    bdf_output = dut.adb.shell("cksum /vendor/firmware/bdwlan*")
+    logging.debug("BDF Checksum output: {}".format(bdf_output))
+    bdf_signature = (
+        sum([int(line.split(" ")[0]) for line in bdf_output.splitlines()]) % 1000
+    )
 
-    fw_output = dut.adb.shell('halutil -logger -get fw')
-    logging.debug('Firmware version output: {}'.format(fw_output))
-    fw_version = re.search(FW_REGEX, fw_output).group('firmware')
-    fw_signature = fw_version.split('.')[-3:-1]
-    fw_signature = float('.'.join(fw_signature))
+    fw_output = dut.adb.shell("halutil -logger -get fw")
+    logging.debug("Firmware version output: {}".format(fw_output))
+    fw_version = re.search(FW_REGEX, fw_output).group("firmware")
+    fw_signature = fw_version.split(".")[-3:-1]
+    fw_signature = float(".".join(fw_signature))
     serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
     return {
-        'config_signature': bdf_signature,
-        'fw_signature': fw_signature,
-        'serial_hash': serial_hash
+        "config_signature": bdf_signature,
+        "fw_signature": fw_signature,
+        "serial_hash": serial_hash,
     }
 
 
 def get_country_code(dut):
-    country_code = dut.adb.shell('iw reg get | grep country | head -1')
-    country_code = country_code.split(':')[0].split(' ')[1]
-    if country_code == '00':
-        country_code = 'WW'
+    country_code = dut.adb.shell("iw reg get | grep country | head -1")
+    country_code = country_code.split(":")[0].split(" ")[1]
+    if country_code == "00":
+        country_code = "WW"
     return country_code
 
 
 def push_config(dut, config_file):
-    config_files_list = dut.adb.shell(
-        'ls /vendor/firmware/bdwlan*').splitlines()
+    config_files_list = dut.adb.shell("ls /vendor/firmware/bdwlan*").splitlines()
     for dst_file in config_files_list:
         dut.push_system_file(config_file, dst_file)
     dut.reboot()
@@ -228,20 +224,20 @@
     dut.droid.wifiEnableVerboseLogging(1)
     msg = "Failed to enable WiFi verbose logging."
     asserts.assert_equal(dut.droid.wifiGetVerboseLoggingLevel(), 1, msg)
-    logging.info('Starting CNSS logs')
-    dut.adb.shell("find /data/vendor/wifi/wlan_logs/ -type f -delete",
-                  ignore_status=True)
-    dut.adb.shell_nb('cnss_diag -f -s')
+    logging.info("Starting CNSS logs")
+    dut.adb.shell(
+        "find /data/vendor/wifi/wlan_logs/ -type f -delete", ignore_status=True
+    )
+    dut.adb.shell_nb("cnss_diag -f -s")
 
 
 def stop_wifi_logging(dut):
-    logging.info('Stopping CNSS logs')
-    dut.adb.shell('killall cnss_diag')
+    logging.info("Stopping CNSS logs")
+    dut.adb.shell("killall cnss_diag")
     logs = dut.get_file_names("/data/vendor/wifi/wlan_logs/")
     if logs:
         dut.log.info("Pulling cnss_diag logs %s", logs)
-        log_path = os.path.join(dut.device_log_path,
-                                "CNSS_DIAG_%s" % dut.serial)
+        log_path = os.path.join(dut.device_log_path, "CNSS_DIAG_%s" % dut.serial)
         os.makedirs(log_path, exist_ok=True)
         dut.pull_files(logs, log_path)
 
@@ -255,28 +251,28 @@
         datamsc_file: path to Data.msc file
     """
     for file in firmware_files:
-        dut.push_system_file(file, '/vendor/firmware/')
+        dut.push_system_file(file, "/vendor/firmware/")
     dut.reboot()
 
 
 def _set_ini_fields(ini_file_path, ini_field_dict):
-    template_regex = r'^{}=[0-9,.x-]+'
-    with open(ini_file_path, 'r') as f:
+    template_regex = r"^{}=[0-9,.x-]+"
+    with open(ini_file_path, "r") as f:
         ini_lines = f.read().splitlines()
         for idx, line in enumerate(ini_lines):
             for field_name, field_value in ini_field_dict.items():
                 line_regex = re.compile(template_regex.format(field_name))
                 if re.match(line_regex, line):
-                    ini_lines[idx] = '{}={}'.format(field_name, field_value)
+                    ini_lines[idx] = "{}={}".format(field_name, field_value)
                     print(ini_lines[idx])
-    with open(ini_file_path, 'w') as f:
-        f.write('\n'.join(ini_lines) + '\n')
+    with open(ini_file_path, "w") as f:
+        f.write("\n".join(ini_lines) + "\n")
 
 
 def _edit_dut_ini(dut, ini_fields):
     """Function to edit Wifi ini files."""
-    dut_ini_path = '/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini'
-    local_ini_path = os.path.expanduser('~/WCNSS_qcom_cfg.ini')
+    dut_ini_path = "/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini"
+    local_ini_path = os.path.expanduser("~/WCNSS_qcom_cfg.ini")
     dut.pull_files(dut_ini_path, local_ini_path)
 
     _set_ini_fields(local_ini_path, ini_fields)
@@ -286,67 +282,68 @@
 
 
 def set_chain_mask(dut, chain_mask):
-    curr_mask = getattr(dut, 'chain_mask', '2x2')
+    curr_mask = getattr(dut, "chain_mask", "2x2")
     if curr_mask == chain_mask:
         return
     dut.chain_mask = chain_mask
-    if chain_mask == '2x2':
+    if chain_mask == "2x2":
         ini_fields = {
-            'gEnable2x2': 2,
-            'gSetTxChainmask1x1': 1,
-            'gSetRxChainmask1x1': 1,
-            'gDualMacFeatureDisable': 6,
-            'gDot11Mode': 0
+            "gEnable2x2": 2,
+            "gSetTxChainmask1x1": 1,
+            "gSetRxChainmask1x1": 1,
+            "gDualMacFeatureDisable": 6,
+            "gDot11Mode": 0,
         }
     else:
         ini_fields = {
-            'gEnable2x2': 0,
-            'gSetTxChainmask1x1': chain_mask + 1,
-            'gSetRxChainmask1x1': chain_mask + 1,
-            'gDualMacFeatureDisable': 1,
-            'gDot11Mode': 0
+            "gEnable2x2": 0,
+            "gSetTxChainmask1x1": chain_mask + 1,
+            "gSetRxChainmask1x1": chain_mask + 1,
+            "gDualMacFeatureDisable": 1,
+            "gDot11Mode": 0,
         }
     _edit_dut_ini(dut, ini_fields)
 
 
 def set_wifi_mode(dut, mode):
     TX_MODE_DICT = {
-        'Auto': 0,
-        '11n': 4,
-        '11ac': 9,
-        '11abg': 1,
-        '11b': 2,
-        '11': 3,
-        '11g only': 5,
-        '11n only': 6,
-        '11b only': 7,
-        '11ac only': 8
+        "Auto": 0,
+        "11n": 4,
+        "11ac": 9,
+        "11abg": 1,
+        "11b": 2,
+        "11": 3,
+        "11g only": 5,
+        "11n only": 6,
+        "11b only": 7,
+        "11ac only": 8,
     }
 
     ini_fields = {
-        'gEnable2x2': 2,
-        'gSetTxChainmask1x1': 1,
-        'gSetRxChainmask1x1': 1,
-        'gDualMacFeatureDisable': 6,
-        'gDot11Mode': TX_MODE_DICT[mode]
+        "gEnable2x2": 2,
+        "gSetTxChainmask1x1": 1,
+        "gSetRxChainmask1x1": 1,
+        "gDualMacFeatureDisable": 6,
+        "gDot11Mode": TX_MODE_DICT[mode],
     }
     _edit_dut_ini(dut, ini_fields)
 
 
-class LinkLayerStats():
-
-    LLSTATS_CMD = 'cat /d/wlan0/ll_stats'
-    PEER_REGEX = 'LL_STATS_PEER_ALL'
+class LinkLayerStats:
+    LLSTATS_CMD = "cat /d/wlan0/ll_stats"
+    PEER_REGEX = "LL_STATS_PEER_ALL"
     MCS_REGEX = re.compile(
-        r'preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), '
-        'mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), '
-        'rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), '
-        'retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), '
-        'retries_long: (?P<retries_long>\S+)')
+        r"preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), "
+        "mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), "
+        "rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), "
+        "retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), "
+        "retries_long: (?P<retries_long>\S+)"
+    )
     MCS_ID = collections.namedtuple(
-        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'rate'])
-    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
-    BW_MAP = {'0': 20, '1': 40, '2': 80}
+        "mcs_id", ["mode", "num_streams", "bandwidth", "mcs", "rate"]
+    )
+    MODE_MAP = {"0": "11a/g", "1": "11b", "2": "11n", "3": "11ac"}
+    BW_MAP = {"0": 20, "1": 40, "2": 80}
 
     def __init__(self, dut, llstats_enabled=True):
         self.dut = dut
@@ -357,12 +354,11 @@
     def update_stats(self):
         if self.llstats_enabled:
             try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
-                                                    timeout=0.1)
+                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD, timeout=0.1)
             except:
-                llstats_output = ''
+                llstats_output = ""
         else:
-            llstats_output = ''
+            llstats_output = ""
         self._update_stats(llstats_output)
 
     def reset_stats(self):
@@ -370,21 +366,19 @@
         self.llstats_incremental = self._empty_llstats()
 
     def _empty_llstats(self):
-        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
-                                       summary=collections.OrderedDict())
+        return collections.OrderedDict(
+            mcs_stats=collections.OrderedDict(), summary=collections.OrderedDict()
+        )
 
     def _empty_mcs_stat(self):
-        return collections.OrderedDict(txmpdu=0,
-                                       rxmpdu=0,
-                                       mpdu_lost=0,
-                                       retries=0,
-                                       retries_short=0,
-                                       retries_long=0)
+        return collections.OrderedDict(
+            txmpdu=0, rxmpdu=0, mpdu_lost=0, retries=0, retries_short=0, retries_long=0
+        )
 
     def _mcs_id_to_string(self, mcs_id):
-        mcs_string = '{} {}MHz Nss{} MCS{} {}Mbps'.format(
-            mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs,
-            mcs_id.rate)
+        mcs_string = "{} {}MHz Nss{} MCS{} {}Mbps".format(
+            mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs, mcs_id.rate
+        )
         return mcs_string
 
     def _parse_mcs_stats(self, llstats_output):
@@ -397,18 +391,21 @@
         # Find and process all matches for per stream stats
         match_iter = re.finditer(self.MCS_REGEX, llstats_output)
         for match in match_iter:
-            current_mcs = self.MCS_ID(self.MODE_MAP[match.group('mode')],
-                                      int(match.group('num_streams')) + 1,
-                                      self.BW_MAP[match.group('bw')],
-                                      int(match.group('mcs')),
-                                      int(match.group('rate'), 16) / 1000)
+            current_mcs = self.MCS_ID(
+                self.MODE_MAP[match.group("mode")],
+                int(match.group("num_streams")) + 1,
+                self.BW_MAP[match.group("bw")],
+                int(match.group("mcs")),
+                int(match.group("rate"), 16) / 1000,
+            )
             current_stats = collections.OrderedDict(
-                txmpdu=int(match.group('txmpdu')),
-                rxmpdu=int(match.group('rxmpdu')),
-                mpdu_lost=int(match.group('mpdu_lost')),
-                retries=int(match.group('retries')),
-                retries_short=int(match.group('retries_short')),
-                retries_long=int(match.group('retries_long')))
+                txmpdu=int(match.group("txmpdu")),
+                rxmpdu=int(match.group("rxmpdu")),
+                mpdu_lost=int(match.group("mpdu_lost")),
+                retries=int(match.group("retries")),
+                retries_short=int(match.group("retries_short")),
+                retries_long=int(match.group("retries_long")),
+            )
             llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
         return llstats_dict
 
@@ -419,49 +416,55 @@
         return stats_diff
 
     def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
-                                                  common_tx_mcs_count=0,
-                                                  common_tx_mcs_freq=0,
-                                                  common_rx_mcs=None,
-                                                  common_rx_mcs_count=0,
-                                                  common_rx_mcs_freq=0,
-                                                  rx_per=float('nan'))
+        llstats_summary = collections.OrderedDict(
+            common_tx_mcs=None,
+            common_tx_mcs_count=0,
+            common_tx_mcs_freq=0,
+            common_rx_mcs=None,
+            common_rx_mcs_count=0,
+            common_rx_mcs_freq=0,
+            rx_per=float("nan"),
+        )
 
         txmpdu_count = 0
         rxmpdu_count = 0
-        for mcs_id, mcs_stats in llstats_dict['mcs_stats'].items():
-            if mcs_stats['txmpdu'] > llstats_summary['common_tx_mcs_count']:
-                llstats_summary['common_tx_mcs'] = mcs_id
-                llstats_summary['common_tx_mcs_count'] = mcs_stats['txmpdu']
-            if mcs_stats['rxmpdu'] > llstats_summary['common_rx_mcs_count']:
-                llstats_summary['common_rx_mcs'] = mcs_id
-                llstats_summary['common_rx_mcs_count'] = mcs_stats['rxmpdu']
-            txmpdu_count += mcs_stats['txmpdu']
-            rxmpdu_count += mcs_stats['rxmpdu']
+        for mcs_id, mcs_stats in llstats_dict["mcs_stats"].items():
+            if mcs_stats["txmpdu"] > llstats_summary["common_tx_mcs_count"]:
+                llstats_summary["common_tx_mcs"] = mcs_id
+                llstats_summary["common_tx_mcs_count"] = mcs_stats["txmpdu"]
+            if mcs_stats["rxmpdu"] > llstats_summary["common_rx_mcs_count"]:
+                llstats_summary["common_rx_mcs"] = mcs_id
+                llstats_summary["common_rx_mcs_count"] = mcs_stats["rxmpdu"]
+            txmpdu_count += mcs_stats["txmpdu"]
+            rxmpdu_count += mcs_stats["rxmpdu"]
         if txmpdu_count:
-            llstats_summary['common_tx_mcs_freq'] = (
-                llstats_summary['common_tx_mcs_count'] / txmpdu_count)
+            llstats_summary["common_tx_mcs_freq"] = (
+                llstats_summary["common_tx_mcs_count"] / txmpdu_count
+            )
         if rxmpdu_count:
-            llstats_summary['common_rx_mcs_freq'] = (
-                llstats_summary['common_rx_mcs_count'] / rxmpdu_count)
+            llstats_summary["common_rx_mcs_freq"] = (
+                llstats_summary["common_rx_mcs_count"] / rxmpdu_count
+            )
         return llstats_summary
 
     def _update_stats(self, llstats_output):
         # Parse stats
         new_llstats = self._empty_llstats()
-        new_llstats['mcs_stats'] = self._parse_mcs_stats(llstats_output)
+        new_llstats["mcs_stats"] = self._parse_mcs_stats(llstats_output)
         # Save old stats and set new cumulative stats
         old_llstats = self.llstats_cumulative.copy()
         self.llstats_cumulative = new_llstats.copy()
         # Compute difference between new and old stats
         self.llstats_incremental = self._empty_llstats()
-        for mcs_id, new_mcs_stats in new_llstats['mcs_stats'].items():
-            old_mcs_stats = old_llstats['mcs_stats'].get(
-                mcs_id, self._empty_mcs_stat())
-            self.llstats_incremental['mcs_stats'][
-                mcs_id] = self._diff_mcs_stats(new_mcs_stats, old_mcs_stats)
+        for mcs_id, new_mcs_stats in new_llstats["mcs_stats"].items():
+            old_mcs_stats = old_llstats["mcs_stats"].get(mcs_id, self._empty_mcs_stat())
+            self.llstats_incremental["mcs_stats"][mcs_id] = self._diff_mcs_stats(
+                new_mcs_stats, old_mcs_stats
+            )
         # Generate llstats summary
-        self.llstats_incremental['summary'] = self._generate_stats_summary(
-            self.llstats_incremental)
-        self.llstats_cumulative['summary'] = self._generate_stats_summary(
-            self.llstats_cumulative)
+        self.llstats_incremental["summary"] = self._generate_stats_summary(
+            self.llstats_incremental
+        )
+        self.llstats_cumulative["summary"] = self._generate_stats_summary(
+            self.llstats_cumulative
+        )
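
Before the next file, a standalone sketch of the `wpa_cli signal_poll` parsing that get_connected_rssi above relies on; the output text is hypothetical and real values vary by driver.

import re

# Hypothetical `wpa_cli -i wlan0 signal_poll` output.
signal_poll_output = (
    "RSSI=-58\n"
    "LINKSPEED=866\n"
    "NOISE=9999\n"
    "FREQUENCY=5180\n"
    "AVG_RSSI=-57\n"
)

# Same line-oriented "KEY=value" parsing used by get_connected_rssi above.
rssi_match = re.search("RSSI=.*", signal_poll_output)
rssi = int(rssi_match.group(0).split("=")[1]) if rssi_match else float("nan")
freq_match = re.search("FREQUENCY=.*", signal_poll_output)
frequency = int(freq_match.group(0).split("=")[1]) if freq_match else float("nan")

print(rssi, frequency)  # -58 5180
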
diff --git a/src/antlion/test_utils/wifi/wifi_power_test_utils.py b/src/antlion/test_utils/wifi/wifi_power_test_utils.py
index cbd51fc..dba8461 100644
--- a/src/antlion/test_utils/wifi/wifi_power_test_utils.py
+++ b/src/antlion/test_utils/wifi/wifi_power_test_utils.py
@@ -28,10 +28,10 @@
 # On ubuntu, sudo pip3 install scapy
 import scapy.all as scapy
 
-GET_FROM_PHONE = 'get_from_dut'
-GET_FROM_AP = 'get_from_ap'
-ENABLED_MODULATED_DTIM = 'gEnableModulatedDTIM='
-MAX_MODULATED_DTIM = 'gMaxLIModulatedDTIM='
+GET_FROM_PHONE = "get_from_dut"
+GET_FROM_AP = "get_from_ap"
+ENABLED_MODULATED_DTIM = "gEnableModulatedDTIM="
+MAX_MODULATED_DTIM = "gMaxLIModulatedDTIM="
 
 
 def change_dtim(ad, gEnableModulatedDTIM, gMaxLIModulatedDTIM=10):
@@ -42,61 +42,62 @@
         gEnableModulatedDTIM: Modulated DTIM, int
         gMaxLIModulatedDTIM: Maximum modulated DTIM, int
     """
-    ad.log.info('Sets dtim to {}'.format(gEnableModulatedDTIM))
+    ad.log.info("Sets dtim to {}".format(gEnableModulatedDTIM))
 
     # In P21 the dtim setting method changed, and reading ini_file_phone raises
     # an AdbCommandError, so wrap the old method in a try/except block. If the
     # error occurs, fall back to change_dtim_adb later; otherwise, first try to
     # find the ini file with the DTIM settings.
     try:
-        ini_file_phone = ad.adb.shell('ls /vendor/firmware/wlan/*/*.ini')
+        ini_file_phone = ad.adb.shell("ls /vendor/firmware/wlan/*/*.ini")
 
     except AdbCommandError as e:
-
+        # Got an AdbCommandError; change dtim later with the change_dtim_adb method.
+        # change_dtim_adb requires that the wifi connection is on.
-        ad.log.info('Gets AdbCommandError, change dtim with change_dtim_adb.')
+        ad.log.info("Gets AdbCommandError, change dtim with change_dtim_adb.")
         change_dtim_adb(ad, gEnableModulatedDTIM)
         return 0
 
-    ini_file_local = ini_file_phone.split('/')[-1]
+    ini_file_local = ini_file_phone.split("/")[-1]
 
     # Pull the file and change the DTIM to desired value
-    ad.adb.pull('{} {}'.format(ini_file_phone, ini_file_local))
+    ad.adb.pull("{} {}".format(ini_file_phone, ini_file_local))
 
-    with open(ini_file_local, 'r') as fin:
+    with open(ini_file_local, "r") as fin:
         for line in fin:
             if ENABLED_MODULATED_DTIM in line:
-                gE_old = line.strip('\n')
-                gEDTIM_old = line.strip(ENABLED_MODULATED_DTIM).strip('\n')
+                gE_old = line.strip("\n")
+                gEDTIM_old = line.strip(ENABLED_MODULATED_DTIM).strip("\n")
             if MAX_MODULATED_DTIM in line:
-                gM_old = line.strip('\n')
-                gMDTIM_old = line.strip(MAX_MODULATED_DTIM).strip('\n')
+                gM_old = line.strip("\n")
+                gMDTIM_old = line.strip(MAX_MODULATED_DTIM).strip("\n")
     fin.close()
-    if int(gEDTIM_old) == gEnableModulatedDTIM and int(
-            gMDTIM_old) == gMaxLIModulatedDTIM:
-        ad.log.info('Current DTIM is already the desired value,'
-                    'no need to reset it')
+    if (
+        int(gEDTIM_old) == gEnableModulatedDTIM
+        and int(gMDTIM_old) == gMaxLIModulatedDTIM
+    ):
+        ad.log.info("Current DTIM is already the desired value," "no need to reset it")
         return 0
 
     gE_new = ENABLED_MODULATED_DTIM + str(gEnableModulatedDTIM)
     gM_new = MAX_MODULATED_DTIM + str(gMaxLIModulatedDTIM)
 
-    sed_gE = 'sed -i \'s/{}/{}/g\' {}'.format(gE_old, gE_new, ini_file_local)
-    sed_gM = 'sed -i \'s/{}/{}/g\' {}'.format(gM_old, gM_new, ini_file_local)
+    sed_gE = "sed -i 's/{}/{}/g' {}".format(gE_old, gE_new, ini_file_local)
+    sed_gM = "sed -i 's/{}/{}/g' {}".format(gM_old, gM_new, ini_file_local)
     job.run(sed_gE)
     job.run(sed_gM)
 
     # Push the file to the phone
     push_file_to_phone(ad, ini_file_local, ini_file_phone)
-    ad.log.info('DTIM changes checked in and rebooting...')
+    ad.log.info("DTIM changes checked in and rebooting...")
     ad.reboot()
     # Wait for auto-wifi feature to start
     time.sleep(20)
-    ad.adb.shell('dumpsys battery set level 100')
-    ad.log.info('DTIM updated and device back from reboot')
+    ad.adb.shell("dumpsys battery set level 100")
+    ad.log.info("DTIM updated and device back from reboot")
     return 1
 
+
 def change_dtim_adb(ad, gEnableModulatedDTIM):
     """Function to change the DTIM setting in the P21 phone.
 
@@ -106,62 +107,66 @@
         ad: the target android device, AndroidDevice object
         gEnableModulatedDTIM: Modulated DTIM, int
     """
-    ad.log.info('Changes DTIM to {} with adb'.format(gEnableModulatedDTIM))
+    ad.log.info("Changes DTIM to {} with adb".format(gEnableModulatedDTIM))
     ad.adb.root()
-    screen_status = ad.adb.shell('dumpsys nfc | grep Screen')
-    screen_is_on = 'ON_UNLOCKED' in screen_status
+    screen_status = ad.adb.shell("dumpsys nfc | grep Screen")
+    screen_is_on = "ON_UNLOCKED" in screen_status
 
     # To read the dtim with 'adb shell wl bcn_li_dtim', the screen should be off
     if screen_is_on:
-        ad.log.info('The screen is on. Set it to off before change dtim')
+        ad.log.info("The screen is on. Set it to off before change dtim")
         ad.droid.goToSleepNow()
         time_limit_seconds = 60
         _wait_screen_off(ad, time_limit_seconds)
 
     old_dtim = _read_dtim_adb(ad)
-    ad.log.info('The dtim before change is {}'.format(old_dtim))
+    ad.log.info("The dtim before change is {}".format(old_dtim))
     try:
         if int(old_dtim) == gEnableModulatedDTIM:
-            ad.log.info('Current DTIM is already the desired value,'
-                        'no need to reset it')
+            ad.log.info(
+                "Current DTIM is already the desired value," "no need to reset it"
+            )
             if screen_is_on:
-                ad.log.info('Changes the screen to the original on status')
+                ad.log.info("Changes the screen to the original on status")
                 ad.droid.wakeUpNow()
             return
     except Exception as e:
-        ad.log.info('old_dtim is not available from adb')
+        ad.log.info("old_dtim is not available from adb")
 
     current_dtim = _set_dtim(ad, gEnableModulatedDTIM)
-    ad.log.info(
-        'Old DTIM is {}, current DTIM is {}'.format(old_dtim, current_dtim))
+    ad.log.info("Old DTIM is {}, current DTIM is {}".format(old_dtim, current_dtim))
     if screen_is_on:
-        ad.log.info('Changes the screen to the original on status')
+        ad.log.info("Changes the screen to the original on status")
         ad.droid.wakeUpNow()
 
+
 def _set_dtim(ad, gEnableModulatedDTIM):
     out = ad.adb.shell("halutil -dtim_config {}".format(gEnableModulatedDTIM))
-    ad.log.info('set dtim to {}, stdout: {}'.format(
-        gEnableModulatedDTIM, out))
+    ad.log.info("set dtim to {}, stdout: {}".format(gEnableModulatedDTIM, out))
     return _read_dtim_adb(ad)
 
+
 def _read_dtim_adb(ad):
     try:
-        old_dtim = ad.adb.shell('wl bcn_li_dtim')
+        old_dtim = ad.adb.shell("wl bcn_li_dtim")
         return old_dtim
     except Exception as e:
-        ad.log.info('When reading dtim get error {}'.format(e))
-        return 'The dtim value is not available from adb'
+        ad.log.info("When reading dtim get error {}".format(e))
+        return "The dtim value is not available from adb"
+
 
 def _wait_screen_off(ad, time_limit_seconds):
     while time_limit_seconds > 0:
-        screen_status = ad.adb.shell('dumpsys nfc | grep Screen')
-        if 'OFF_UNLOCKED' in screen_status:
-            ad.log.info('The screen status is {}'.format(screen_status))
+        screen_status = ad.adb.shell("dumpsys nfc | grep Screen")
+        if "OFF_UNLOCKED" in screen_status:
+            ad.log.info("The screen status is {}".format(screen_status))
             return
         time.sleep(1)
         time_limit_seconds -= 1
-    raise TimeoutError('Timed out while waiting the screen off after {} '
-                       'seconds.'.format(time_limit_seconds))
+    raise TimeoutError(
+        "Timed out while waiting the screen off after {} "
+        "seconds.".format(time_limit_seconds)
+    )
 
 
 def push_file_to_phone(ad, file_local, file_phone):
@@ -174,16 +179,16 @@
     """
     ad.adb.root()
     cmd_out = ad.adb.remount()
-    if 'Permission denied' in cmd_out:
-        ad.log.info('Need to disable verity first and reboot')
+    if "Permission denied" in cmd_out:
+        ad.log.info("Need to disable verity first and reboot")
         ad.adb.disable_verity()
         time.sleep(1)
         ad.reboot()
-        ad.log.info('Verity disabled and device back from reboot')
+        ad.log.info("Verity disabled and device back from reboot")
         ad.adb.root()
         ad.adb.remount()
     time.sleep(1)
-    ad.adb.push('{} {}'.format(file_local, file_phone))
+    ad.adb.push("{} {}".format(file_local, file_phone))
 
 
 def ap_setup(ap, network, bandwidth=80, dtim_period=None):
@@ -203,8 +208,7 @@
     ssid = network[wutils.WifiEnums.SSID_KEY]
     if "password" in network.keys():
         password = network["password"]
-        security = hostapd_security.Security(
-            security_mode="wpa", password=password)
+        security = hostapd_security.Security(security_mode="wpa", password=password)
     else:
         security = hostapd_security.Security(security_mode=None, password=None)
     channel = network["channel"]
@@ -215,12 +219,14 @@
         security=security,
         bss_settings=bss_settings,
         vht_bandwidth=bandwidth,
-        profile_name='whirlwind',
+        profile_name="whirlwind",
         iface_wlan_2g=ap.wlan_2g,
-        iface_wlan_5g=ap.wlan_5g)
+        iface_wlan_5g=ap.wlan_5g,
+    )
     config_bridge = ap.generate_bridge_configs(channel)
-    brconfigs = bi.BridgeInterfaceConfigs(config_bridge[0], config_bridge[1],
-                                          config_bridge[2])
+    brconfigs = bi.BridgeInterfaceConfigs(
+        config_bridge[0], config_bridge[1], config_bridge[2]
+    )
     ap.bridge.startup(brconfigs)
     ap.start_ap(config)
     log.info("AP started on channel {} with SSID {}".format(channel, ssid))
@@ -241,8 +247,11 @@
 
     """
     log = logging.getLogger()
-    ad.adb.shell_nb("nohup >/dev/null 2>&1 sh -c 'iperf3 -c {} {} &'".format(
-        server_host, extra_args))
+    ad.adb.shell_nb(
+        "nohup >/dev/null 2>&1 sh -c 'iperf3 -c {} {} &'".format(
+            server_host, extra_args
+        )
+    )
     log.info("IPerf client started")
 
 
@@ -254,7 +263,7 @@
     Returns:
         RSSI: the rssi level of the device
     """
-    RSSI = ad.droid.wifiGetConnectionInfo()['rssi']
+    RSSI = ad.droid.wifiGetConnectionInfo()["rssi"]
     return RSSI
 
 
@@ -266,7 +275,7 @@
     Returns:
         IP: IP address of the phone for WiFi, as a string
     """
-    IP = ad.droid.connectivityGetIPv4Addresses('wlan0')[0]
+    IP = ad.droid.connectivityGetIPv4Addresses("wlan0")[0]
 
     return IP
 
@@ -292,7 +301,7 @@
     Returns:
         IPv6: IPv6 address of the phone for WiFi, as a string
     """
-    IPv6 = ad.droid.connectivityGetLinkLocalIpv6Address('wlan0')[:-6]
+    IPv6 = ad.droid.connectivityGetLinkLocalIpv6Address("wlan0")[:-6]
 
     return IPv6
 
@@ -314,17 +323,17 @@
     reset_host_interface(interface_name)
     start_time = time.time()
     time_limit_seconds = 60
-    ip = '0.0.0.0'
+    ip = "0.0.0.0"
     while start_time + time_limit_seconds > time.time():
         ip = scapy.get_if_addr(interface_name)
-        if ip == '0.0.0.0':
+        if ip == "0.0.0.0":
             time.sleep(1)
         else:
-            log.info(
-                'DHCP address assigned to %s as %s' % (interface_name, ip))
+            log.info("DHCP address assigned to %s as %s" % (interface_name, ip))
             return ip
-    raise TimeoutError('Timed out while getting if_addr after %s seconds.' %
-                       time_limit_seconds)
+    raise TimeoutError(
+        "Timed out while getting if_addr after %s seconds." % time_limit_seconds
+    )
 
 
 def reset_host_interface(intferface_name):
@@ -334,15 +343,15 @@
         intferface_name: the desired interface to reset
     """
     log = logging.getLogger()
-    intf_down_cmd = 'ifconfig %s down' % intferface_name
-    intf_up_cmd = 'ifconfig %s up' % intferface_name
+    intf_down_cmd = "ifconfig %s down" % intferface_name
+    intf_up_cmd = "ifconfig %s up" % intferface_name
     try:
         job.run(intf_down_cmd)
         time.sleep(10)
         job.run(intf_up_cmd)
-        log.info('{} has been reset'.format(intferface_name))
+        log.info("{} has been reset".format(intferface_name))
     except job.Error:
-        raise Exception('No such interface')
+        raise Exception("No such interface")
 
 
 def bringdown_host_interface(intferface_name):
@@ -352,13 +361,13 @@
         intferface_name: the desired interface to reset
     """
     log = logging.getLogger()
-    intf_down_cmd = 'ifconfig %s down' % intferface_name
+    intf_down_cmd = "ifconfig %s down" % intferface_name
     try:
         job.run(intf_down_cmd)
         time.sleep(2)
-        log.info('{} has been brought down'.format(intferface_name))
+        log.info("{} has been brought down".format(intferface_name))
     except job.Error:
-        raise Exception('No such interface')
+        raise Exception("No such interface")
 
 
 def create_pkt_config(test_class):
@@ -370,9 +379,11 @@
     Returns:
         Dictionary with the multicast packet config
     """
-    addr_type = (scapy.IPV6_ADDR_LINKLOCAL
-                 if test_class.ipv6_src_type == 'LINK_LOCAL' else
-                 scapy.IPV6_ADDR_GLOBAL)
+    addr_type = (
+        scapy.IPV6_ADDR_LINKLOCAL
+        if test_class.ipv6_src_type == "LINK_LOCAL"
+        else scapy.IPV6_ADDR_GLOBAL
+    )
 
     mac_dst = test_class.mac_dst
     if GET_FROM_PHONE in test_class.mac_dst:
@@ -391,15 +402,15 @@
         ipv4_gw = test_class.access_point.ssh_settings.hostname
 
     pkt_gen_config = {
-        'interf': test_class.pkt_sender.interface,
-        'subnet_mask': test_class.sub_mask,
-        'src_mac': test_class.mac_src,
-        'dst_mac': mac_dst,
-        'src_ipv4': test_class.ipv4_src,
-        'dst_ipv4': ipv4_dst,
-        'src_ipv6': test_class.ipv6_src,
-        'src_ipv6_type': addr_type,
-        'dst_ipv6': ipv6_dst,
-        'gw_ipv4': ipv4_gw
+        "interf": test_class.pkt_sender.interface,
+        "subnet_mask": test_class.sub_mask,
+        "src_mac": test_class.mac_src,
+        "dst_mac": mac_dst,
+        "src_ipv4": test_class.ipv4_src,
+        "dst_ipv4": ipv4_dst,
+        "src_ipv6": test_class.ipv6_src,
+        "src_ipv6_type": addr_type,
+        "dst_ipv6": ipv6_dst,
+        "gw_ipv4": ipv4_gw,
     }
     return pkt_gen_config
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/__init__.py b/src/antlion/test_utils/wifi/wifi_retail_ap/__init__.py
deleted file mode 100644
index 0046ed6..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/__init__.py
+++ /dev/null
@@ -1,552 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections.abc
-import copy
-import fcntl
-import importlib
-import os
-import selenium
-import splinter
-import time
-from antlion import logger
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-def create(configs):
-    """Factory method for retail AP class.
-
-    Args:
-        configs: list of dicts containing ap settings. ap settings must contain
-        the following: brand, model, ip_address, username and password
-    """
-    SUPPORTED_APS = {
-        ('Netgear', 'R7000'): {
-            'name': 'NetgearR7000AP',
-            'package': 'netgear_r7000'
-        },
-        ('Netgear', 'R7000NA'): {
-            'name': 'NetgearR7000NAAP',
-            'package': 'netgear_r7000'
-        },
-        ('Netgear', 'R7500'): {
-            'name': 'NetgearR7500AP',
-            'package': 'netgear_r7500'
-        },
-        ('Netgear', 'R7500NA'): {
-            'name': 'NetgearR7500NAAP',
-            'package': 'netgear_r7500'
-        },
-        ('Netgear', 'R7800'): {
-            'name': 'NetgearR7800AP',
-            'package': 'netgear_r7800'
-        },
-        ('Netgear', 'R8000'): {
-            'name': 'NetgearR8000AP',
-            'package': 'netgear_r8000'
-        },
-        ('Netgear', 'RAX80'): {
-            'name': 'NetgearRAX80AP',
-            'package': 'netgear_rax80'
-        },
-        ('Netgear', 'RAX120'): {
-            'name': 'NetgearRAX120AP',
-            'package': 'netgear_rax120'
-        },
-        ('Netgear', 'RAX200'): {
-            'name': 'NetgearRAX200AP',
-            'package': 'netgear_rax200'
-        },
-        ('Netgear', 'RAXE500'): {
-            'name': 'NetgearRAXE500AP',
-            'package': 'netgear_raxe500'
-        },
-        ('Brcm', 'Reference'): {
-            'name': 'BrcmRefAP',
-            'package': 'brcm_ref'
-        },
-        ('Google', 'Wifi'): {
-            'name': 'GoogleWifiAP',
-            'package': 'google_wifi'
-        },
-    }
-    objs = []
-    for config in configs:
-        ap_id = (config['brand'], config['model'])
-        if ap_id not in SUPPORTED_APS:
-            raise KeyError('Invalid retail AP brand and model combination.')
-        ap_class_dict = SUPPORTED_APS[ap_id]
-        ap_package = 'acts_contrib.test_utils.wifi.wifi_retail_ap.{}'.format(
-            ap_class_dict['package'])
-        ap_package = importlib.import_module(ap_package)
-        ap_class = getattr(ap_package, ap_class_dict['name'])
-        objs.append(ap_class(config))
-    return objs
-
-
-def destroy(objs):
-    for obj in objs:
-        obj.teardown()
-
-
-class BlockingBrowser(splinter.driver.webdriver.chrome.WebDriver):
-    """Class that implements a blocking browser session on top of selenium.
-
-    The class inherits from and builds upon splinter/selenium's webdriver class
-    and makes sure that only one such webdriver is active on a machine at any
-    single time. The class ensures single session operation using a lock file.
-    The class is to be used within context managers (e.g. with statements) to
-    ensure locks are always properly released.
-    """
-
-    def __init__(self, headless, timeout):
-        """Constructor for BlockingBrowser class.
-
-        Args:
-            headless: boolean to control visible/headless browser operation
-            timeout: maximum time allowed to launch browser
-        """
-        self.log = logger.create_tagged_trace_logger('ChromeDriver')
-        self.chrome_options = splinter.driver.webdriver.chrome.Options()
-        self.chrome_options.add_argument('--no-proxy-server')
-        self.chrome_options.add_argument('--no-sandbox')
-        self.chrome_options.add_argument('--allow-running-insecure-content')
-        self.chrome_options.add_argument('--ignore-certificate-errors')
-        self.chrome_capabilities = selenium.webdriver.common.desired_capabilities.DesiredCapabilities.CHROME.copy(
-        )
-        self.chrome_capabilities['acceptSslCerts'] = True
-        self.chrome_capabilities['acceptInsecureCerts'] = True
-        if headless:
-            self.chrome_options.add_argument('--headless')
-            self.chrome_options.add_argument('--disable-gpu')
-        self.lock_file_path = '/usr/local/bin/chromedriver'
-        self.timeout = timeout
-
-    def __enter__(self):
-        """Entry context manager for BlockingBrowser.
-
-        The enter context manager for BlockingBrowser attempts to lock the
-        browser file. If successful, it launches and returns a chromedriver
-        session. If an exception occurs while starting the browser, the lock
-        file is released.
-        """
-        self.lock_file = open(self.lock_file_path, 'r')
-        start_time = time.time()
-        while time.time() < start_time + self.timeout:
-            try:
-                fcntl.flock(self.lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
-            except BlockingIOError:
-                time.sleep(BROWSER_WAIT_SHORT)
-                continue
-            try:
-                self.driver = selenium.webdriver.Chrome(
-                    options=self.chrome_options,
-                    desired_capabilities=self.chrome_capabilities)
-                self.element_class = splinter.driver.webdriver.WebDriverElement
-                self._cookie_manager = splinter.driver.webdriver.cookie_manager.CookieManager(
-                    self.driver)
-                super(splinter.driver.webdriver.chrome.WebDriver,
-                      self).__init__(2)
-                return super(BlockingBrowser, self).__enter__()
-            except:
-                fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-                self.lock_file.close()
-                raise RuntimeError('Error starting browser. '
-                                   'Releasing lock file.')
-        raise TimeoutError('Could not start chrome browser in time.')
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        """Exit context manager for BlockingBrowser.
-
-        The exit context manager simply calls the parent class exit and
-        releases the lock file.
-        """
-        try:
-            super(BlockingBrowser, self).__exit__(exc_type, exc_value,
-                                                  traceback)
-        except:
-            raise RuntimeError('Failed to quit browser. Releasing lock file.')
-        finally:
-            fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-            self.lock_file.close()
-
-    def restart(self):
-        """Method to restart browser session without releasing lock file."""
-        self.quit()
-        self.__enter__()
-
-    def visit_persistent(self,
-                         url,
-                         page_load_timeout,
-                         num_tries,
-                         backup_url='about:blank',
-                         check_for_element=None):
-        """Method to visit webpages and retry upon failure.
-
-        The function visits a URL and checks that the resulting URL matches
-        the intended URL, i.e. no redirects have happened
-
-        Args:
-            url: the intended url
-            page_load_timeout: timeout for page visits
-            num_tries: number of tries before url is declared unreachable
-            backup_url: url to visit if first url is not reachable. This can be
-            used to simply refresh the browser and try again or to re-login to
-            the AP
-            check_for_element: element id to check for existence on page
-        """
-        self.driver.set_page_load_timeout(page_load_timeout)
-        for idx in range(num_tries):
-            try:
-                self.visit(url)
-            except:
-                self.restart()
-
-            page_reached = self.url.split('/')[-1] == url.split('/')[-1]
-            if check_for_element:
-                time.sleep(BROWSER_WAIT_MED)
-                element = self.find_by_id(check_for_element)
-                if not element:
-                    page_reached = 0
-            if page_reached:
-                break
-            else:
-                try:
-                    self.visit(backup_url)
-                except:
-                    self.restart()
-
-            if idx == num_tries - 1:
-                self.log.error('URL unreachable. Current URL: {}'.format(
-                    self.url))
-                raise RuntimeError('URL unreachable.')
-
-
-class WifiRetailAP(object):
-    """Base class implementation for retail ap.
-
-    Base class provides functions whose implementation is shared by all aps.
-    If some functions such as set_power not supported by ap, checks will raise
-    exceptions.
-    """
-
-    def __init__(self, ap_settings):
-        self.ap_settings = ap_settings.copy()
-        self.log = logger.create_tagged_trace_logger('AccessPoint|{}'.format(
-            self._get_control_ip_address()))
-        # Capabilities variable describing AP capabilities
-        self.capabilities = {
-            'interfaces': [],
-            'channels': {},
-            'modes': {},
-            'default_mode': None
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings.setdefault(interface, {})
-        # Lock AP
-        if self.ap_settings.get('lock_ap', 0):
-            self.lock_timeout = self.ap_settings.get('lock_timeout', 3600)
-            self._lock_ap()
-
-    def teardown(self):
-        """Function to perform destroy operations."""
-        if self.ap_settings.get('lock_ap', 0):
-            self._unlock_ap()
-
-    def reset(self):
-        """Function that resets AP.
-
-        Function implementation is AP dependent and intended to perform any
-        necessary reset operations as part of controller destroy.
-        """
-
-    def read_ap_settings(self):
-        """Function that reads current ap settings.
-
-        Function implementation is AP dependent and thus base class raises exception
-        if function not implemented in child class.
-        """
-        raise NotImplementedError
-
-    def validate_ap_settings(self):
-        """Function to validate ap settings.
-
-        This function compares the actual ap settings read from the web GUI
-        with the assumed settings saved in the AP object. When called after AP
-        configuration, this method helps ensure that our configuration was
-        successful.
-        Note: Calling this function updates the stored ap_settings
-
-        Raises:
-            ValueError: If read AP settings do not match stored settings.
-        """
-        assumed_ap_settings = copy.deepcopy(self.ap_settings)
-        actual_ap_settings = self.read_ap_settings()
-
-        if assumed_ap_settings != actual_ap_settings:
-            self.log.warning(
-                'Discrepancy in AP settings. Some settings may have been overwritten.'
-            )
-
-    def configure_ap(self, **config_flags):
-        """Function that configures ap based on values of ap_settings.
-
-        Function implementation is AP dependent and thus base class raises exception
-        if function not implemented in child class.
-
-        Args:
-            config_flags: optional configuration flags
-        """
-        raise NotImplementedError
-
-    def set_region(self, region):
-        """Function that sets AP region.
-
-        This function sets the region for the AP. Note that this may overwrite
-        channel and bandwidth settings in cases where the new region does not
-        support the current wireless configuration.
-
-        Args:
-            region: string indicating AP region
-        """
-        if region != self.ap_settings['region']:
-            self.log.warning(
-                'Updating region may overwrite wireless settings.')
-        setting_to_update = {'region': region}
-        self.update_ap_settings(setting_to_update)
-
-    def set_radio_on_off(self, network, status):
-        """Function that turns the radio on or off.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            status: boolean indicating on or off (0: off, 1: on)
-        """
-        setting_to_update = {network: {'status': int(status)}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_ssid(self, network, ssid):
-        """Function that sets network SSID.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            ssid: string containing ssid
-        """
-        setting_to_update = {network: {'ssid': str(ssid)}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        if channel not in self.capabilities['channels'][network]:
-            self.log.error('Ch{} is not supported on {} interface.'.format(
-                channel, network))
-        setting_to_update = {network: {'channel': channel}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        elif isinstance(bandwidth, int):
-            bandwidth = str(bandwidth) + self.capabilities['default_mode']
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update = {network: {'bandwidth': bandwidth}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string containing desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        elif isinstance(bandwidth, int):
-            bandwidth = str(bandwidth) + self.capabilities['default_mode']
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        if channel not in self.capabilities['channels'][network]:
-            self.log.error('Ch{} is not supported on {} interface.'.format(
-                channel, network))
-        setting_to_update = {
-            network: {
-                'bandwidth': bandwidth,
-                'channel': channel
-            }
-        }
-        self.update_ap_settings(setting_to_update)
-
-    def set_power(self, network, power):
-        """Function that sets network transmit power.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            power: string containing power level, e.g., 25%, 100%
-        """
-        if 'power' not in self.ap_settings[network].keys():
-            self.log.error(
-                'Cannot configure power on {} interface.'.format(network))
-        setting_to_update = {network: {'power': power}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_security(self, network, security_type, *password):
-        """Function that sets network security setting and password.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            security: string containing security setting, e.g., WPA2-PSK
-            password: optional argument containing password
-        """
-        if (len(password) == 1) and (type(password[0]) == str):
-            setting_to_update = {
-                network: {
-                    'security_type': str(security_type),
-                    'password': str(password[0])
-                }
-            }
-        else:
-            setting_to_update = {
-                network: {
-                    'security_type': str(security_type)
-                }
-            }
-        self.update_ap_settings(setting_to_update)
-
-    def set_rate(self):
-        """Function that configures rate used by AP.
-
-        Function implementation is not supported by most APs and thus base
-        class raises exception if function not implemented in child class.
-        """
-        raise NotImplementedError
-
-    def _update_settings_dict(self,
-                              settings,
-                              updates,
-                              updates_requested=False,
-                              status_toggle_flag=False):
-        new_settings = copy.deepcopy(settings)
-        for key, value in updates.items():
-            if key not in new_settings.keys():
-                raise KeyError('{} is an invalid settings key.'.format(key))
-            elif isinstance(value, collections.abc.Mapping):
-                new_settings[
-                    key], updates_requested, status_toggle_flag = self._update_settings_dict(
-                        new_settings.get(key, {}), value, updates_requested,
-                        status_toggle_flag)
-            elif new_settings[key] != value:
-                new_settings[key] = value
-                updates_requested = True
-                if 'status' in key:
-                    status_toggle_flag = True
-        return new_settings, updates_requested, status_toggle_flag
-
-    def update_ap_settings(self, dict_settings={}, **named_settings):
-        """Function to update settings of existing AP.
-
-        Function copies arguments into ap_settings and calls configure_retail_ap
-        to apply them.
-
-        Args:
-            *dict_settings accepts single dictionary of settings to update
-            **named_settings accepts named settings to update
-            Note: dict and named_settings cannot contain the same settings.
-        """
-        settings_to_update = dict(dict_settings, **named_settings)
-        if len(settings_to_update) != len(dict_settings) + len(named_settings):
-            raise KeyError('The following keys were passed twice: {}'.format(
-                (set(dict_settings.keys()).intersection(
-                    set(named_settings.keys())))))
-
-        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
-            self.ap_settings, settings_to_update)
-
-        if updates_requested:
-            self.configure_ap(status_toggled=status_toggle_flag)
-
-    def band_lookup_by_channel(self, channel):
-        """Function that gives band name by channel number.
-
-        Args:
-            channel: channel number to lookup
-        Returns:
-            band: name of band which this channel belongs to on this ap, False
-            if not supported
-        """
-        for key, value in self.capabilities['channels'].items():
-            if channel in value:
-                return key
-        return False
-
-    def _get_control_ip_address(self):
-        """Function to get AP's Control Interface IP address."""
-        if 'ssh_config' in self.ap_settings.keys():
-            return self.ap_settings['ssh_config']['host']
-        else:
-            return self.ap_settings['ip_address']
-
-    def _lock_ap(self):
-        """Function to lock the ap while tests are running."""
-        self.lock_file_path = '/tmp/{}_{}_{}.lock'.format(
-            self.ap_settings['brand'], self.ap_settings['model'],
-            self._get_control_ip_address())
-        if not os.path.exists(self.lock_file_path):
-            with open(self.lock_file_path, 'w'):
-                pass
-        self.lock_file = open(self.lock_file_path, 'r')
-        start_time = time.time()
-        self.log.info('Trying to acquire AP lock.')
-        while time.time() < start_time + self.lock_timeout:
-            try:
-                fcntl.flock(self.lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
-            except BlockingIOError:
-                time.sleep(BROWSER_WAIT_SHORT)
-                continue
-            self.log.info('AP lock acquired.')
-            return
-        raise RuntimeError('Could not lock AP in time.')
-
-    def _unlock_ap(self):
-        """Function to unlock the AP when tests are done."""
-        self.log.info('Releasing AP lock.')
-        if hasattr(self, 'lock_file'):
-            try:
-                fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-                self.lock_file.close()
-                self.log.info('Succussfully released AP lock file.')
-            except:
-                raise RuntimeError('Error occurred while unlocking AP.')
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/brcm_ref.py b/src/antlion/test_utils/wifi/wifi_retail_ap/brcm_ref.py
deleted file mode 100644
index d5461f8..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/brcm_ref.py
+++ /dev/null
@@ -1,243 +0,0 @@
-import collections
-import numpy
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 10
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class BrcmRefAP(WifiRetailAP):
-    """Class that implements Netgear RAX200 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        self.config_page = ('{protocol}://{username}:{password}@'
-                            '{ip_address}:{port}/info.html').format(
-                                protocol=self.ap_settings['protocol'],
-                                username=self.ap_settings['admin_username'],
-                                password=self.ap_settings['admin_password'],
-                                ip_address=self.ap_settings['ip_address'],
-                                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'wlrouter/radio.asp').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-
-        self.capabilities = {
-            'interfaces': ['2G_5G', '6G'],
-            'channels': {
-                '2G_5G': [
-                    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 52, 56,
-                    60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 136,
-                    140, 144, 149, 153, 157, 161, 165
-                ],
-                '6G': ['6g' + str(ch) for ch in numpy.arange(1, 222, 4)]
-            },
-            'modes': {
-                '2G_5G': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ],
-                '6G': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        self.ap_settings['region'] = 'United States'
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {
-                'ssid': 'BrcmAP0' if interface == '6G' else 'BrcmAP1',
-                'security_type': 'Open',
-                'password': '1234567890'
-            }
-        self.config_page_fields = collections.OrderedDict({
-            ('2G_5G', 'interface'): ('wl_unit', 1),
-            ('2G_5G', 'band'):
-            'wl_nband',
-            ('2G_5G', 'bandwidth'):
-            'wl_bw_cap',
-            ('2G_5G', 'channel'):
-            'wl_chanspec',
-            ('6G', 'interface'): ('wl_unit', 0),
-            ('6G', 'band'):
-            'wl_nband',
-            ('6G', 'bandwidth'):
-            'wl_bw_cap',
-            ('6G', 'channel'):
-            'wl_chanspec',
-        })
-
-        self.band_mode_values = {'1': '5 GHz', '2': '2.4 GHz', '4': '6 GHz'}
-
-        self.band_values = {'5 GHz': 1, '2.4 GHz': 2, '6 GHz': 4}
-
-        self.bandwidth_mode_values = {
-            '1': 'HE20',
-            '3': 'HE40',
-            '7': 'HE80',
-            '15': 'HE160'
-        }
-
-    def _decode_channel_string(self, channel_string):
-        if channel_string == '0':
-            return 'Auto'
-        if 'u' in channel_string or 'l' in channel_string:
-            channel_string = channel_string[0:-1]
-        elif len(channel_string.split('/')) > 1:
-            channel_string = channel_string.split('/')[0]
-        if '6g' in channel_string:
-            return channel_string
-        else:
-            return int(channel_string)
-
-    def _get_channel_str(self, interface, channel, bandwidth):
-        bandwidth = int(''.join([x for x in bandwidth if x.isdigit()]))
-        if bandwidth == 20:
-            channel_str = str(channel)
-        elif bandwidth in [80, 160]:
-            channel_str = str(channel) + '/' + str(bandwidth)
-        elif interface == '6G' and bandwidth == 40:
-            channel_str = str(channel) + '/' + str(bandwidth)
-        elif interface == '2G_5G' and bandwidth == 40:
-            lower_lookup = [
-                36, 44, 52, 60, 100, 108, 116, 124, 132, 140, 149, 157
-            ]
-            if int(channel) in lower_lookup:
-                channel_str = str(channel) + 'l'
-            else:
-                channel_str = str(channel) + 'u'
-        return channel_str
-
-    def read_ap_settings(self):
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            for key in self.config_page_fields.keys():
-                if 'interface' in key:
-                    config_item = browser.find_by_name(
-                        self.config_page_fields[key][0]).first
-                    config_item.select(self.config_page_fields[key][1])
-                    time.sleep(BROWSER_WAIT_SHORT)
-                else:
-                    config_item = browser.find_by_name(
-                        self.config_page_fields[key]).first
-                    if 'band' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.band_mode_values[config_item.value]
-                    elif 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[
-                            1]] = self.bandwidth_mode_values[config_item.value]
-                    elif 'channel' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self._decode_channel_string(
-                                config_item.value)
-                    else:
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-
-    def update_ap_settings(self, dict_settings={}, **named_settings):
-        """Function to update settings of existing AP.
-
-        Function copies arguments into ap_settings and calls configure_ap
-        to apply them.
-
-        Args:
-            dict_settings: single dictionary of settings to update
-            **named_settings: named settings to update
-            Note: dict and named_settings cannot contain the same settings.
-        """
-
-        settings_to_update = dict(dict_settings, **named_settings)
-        if len(settings_to_update) != len(dict_settings) + len(named_settings):
-            raise KeyError('The following keys were passed twice: {}'.format(
-                (set(dict_settings.keys()).intersection(
-                    set(named_settings.keys())))))
-
-        updating_6G = '6G' in settings_to_update.keys()
-        updating_2G_5G = '2G_5G' in settings_to_update.keys()
-
-        if updating_2G_5G:
-            if 'channel' in settings_to_update['2G_5G']:
-                band = '2.4 GHz' if int(
-                    settings_to_update['2G_5G']['channel']) < 13 else '5 GHz'
-                if band == '2.4 GHz':
-                    settings_to_update['2G_5G']['bandwidth'] = 'HE20'
-                settings_to_update['2G_5G']['band'] = band
-        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
-            self.ap_settings, settings_to_update)
-        if updates_requested:
-            self.configure_ap(updating_2G_5G, updating_6G)
-
-    def configure_ap(self, updating_2G_5G, updating_6G):
-
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-
-            interfaces_to_update = []
-            if updating_2G_5G:
-                interfaces_to_update.append('2G_5G')
-            if updating_6G:
-                interfaces_to_update.append('6G')
-            for interface in interfaces_to_update:
-                # Visit URL
-                browser.visit_persistent(self.config_page, BROWSER_WAIT_MED,
-                                         10)
-                browser.visit_persistent(self.config_page_nologin,
-                                         BROWSER_WAIT_MED, 10,
-                                         self.config_page)
-
-                config_item = browser.find_by_name(
-                    self.config_page_fields[(interface, 'interface')][0]).first
-                config_item.select(self.config_page_fields[(interface,
-                                                            'interface')][1])
-                time.sleep(BROWSER_WAIT_SHORT)
-
-                for key, value in self.config_page_fields.items():
-                    if 'interface' in key or interface not in key:
-                        continue
-                    config_item = browser.find_by_name(
-                        self.config_page_fields[key]).first
-                    if 'band' in key:
-                        config_item.select(
-                            self.band_values[self.ap_settings[key[0]][key[1]]])
-                    elif 'bandwidth' in key:
-                        config_item.select_by_text(
-                            str(self.ap_settings[key[0]][key[1]])[2:] + ' MHz')
-                    elif 'channel' in key:
-                        channel_str = self._get_channel_str(
-                            interface, self.ap_settings[interface][key[1]],
-                            self.ap_settings[interface]['bandwidth'])
-                        config_item.select_by_text(channel_str)
-                    else:
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-                    time.sleep(BROWSER_WAIT_SHORT)
-                # Apply
-                config_item = browser.find_by_name('action')
-                config_item.first.click()
-                time.sleep(BROWSER_WAIT_MED)
-                config_item = browser.find_by_name('action')
-                time.sleep(BROWSER_WAIT_SHORT)
-                config_item.first.click()
-                time.sleep(BROWSER_WAIT_LONG)
-                browser.visit_persistent(self.config_page, BROWSER_WAIT_LONG,
-                                     10)
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/google_wifi.py b/src/antlion/test_utils/wifi/wifi_retail_ap/google_wifi.py
deleted file mode 100644
index ee2a821..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/google_wifi.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers import access_point
-from antlion.controllers.ap_lib import bridge_interface
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-
-
-class GoogleWifiAP(WifiRetailAP):
-    """ Class that implements Google Wifi AP.
-
-    This class is a work in progress
-    """
-
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        # Initialize AP
-        if self.ap_settings['2G']['status'] and self.ap_settings['5G_1'][
-                'status']:
-            raise ValueError('Error initializing Google Wifi AP. '
-                             'Only one interface can be enabled at a time.')
-
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings.setdefault(interface, {})
-
-        self.BW_MODE_MAP = {
-            'legacy': 20,
-            'VHT20': 20,
-            'VHT40': 40,
-            'VHT80': 80
-        }
-        self.default_settings = {
-            'region': 'United States',
-            'brand': 'Google',
-            'model': 'Wifi',
-            'hostapd_profile': 'whirlwind',
-            '2G': {
-                'status': 0,
-                'ssid': 'GoogleWifi_2G',
-                'channel': 11,
-                'bandwidth': 'VHT20',
-                'power': 'auto',
-                'mode': None,
-                'num_streams': None,
-                'rate': 'auto',
-                'short_gi': 0,
-                'security_type': 'Open',
-                'password': 'password',
-                'subnet': '192.168.1.0/24'
-            },
-            '5G_1': {
-                'status': 0,
-                'ssid': 'GoogleWifi_2G',
-                'channel': 11,
-                'bandwidth': 'VHT20',
-                'power': 'auto',
-                'mode': None,
-                'num_streams': None,
-                'rate': 'auto',
-                'short_gi': 0,
-                'security_type': 'Open',
-                'password': 'password',
-                'subnet': '192.168.9.0/24'
-            }
-        }
-        for setting in self.default_settings.keys():
-            if setting in self.capabilities['interfaces']:
-                continue
-            elif setting not in self.ap_settings:
-                self.log.debug(
-                    '{0} {1} not found during init. Setting {0} = {1}'.format(
-                        setting, self.default_settings[setting]))
-                self.ap_settings[setting] = self.default_settings[setting]
-
-        for interface in self.capabilities['interfaces']:
-            for setting in self.default_settings[interface].keys():
-                if setting not in self.ap_settings[interface]:
-                    self.log.debug(
-                        '{0} {1} not found during init. Setting {1} = {2}'.
-                        format(interface, setting,
-                               self.default_settings[interface][setting]))
-                    self.ap_settings[interface][
-                        setting] = self.default_settings[interface][setting]
-        init_settings = self.ap_settings.copy()
-        init_settings['ap_subnet'] = {
-            '2g': self.ap_settings['2G']['subnet'],
-            '5g': self.ap_settings['5G_1']['subnet']
-        }
-        self.access_point = access_point.AccessPoint(init_settings)
-        self.configure_ap()
-
-    def read_ap_settings(self):
-        """Function that reads current ap settings."""
-        return self.ap_settings.copy()
-
-    def update_ap_settings(self, dict_settings={}, **named_settings):
-        """Function to update settings of existing AP.
-
-        Function copies arguments into ap_settings and calls configure_ap
-        to apply them.
-
-        Args:
-            dict_settings: single dictionary of settings to update
-            **named_settings: named settings to update
-            Note: dict and named_settings cannot contain the same settings.
-        """
-        settings_to_update = dict(dict_settings, **named_settings)
-        if len(settings_to_update) != len(dict_settings) + len(named_settings):
-            raise KeyError('The following keys were passed twice: {}'.format(
-                (set(dict_settings.keys()).intersection(
-                    set(named_settings.keys())))))
-
-        updating_2G = '2G' in settings_to_update.keys()
-        updating_5G_1 = '5G_1' in settings_to_update.keys()
-        if updating_2G and updating_5G_1:
-            raise ValueError(
-                'Error updating Google WiFi AP. '
-                'One interface can be activated and updated at a time')
-        elif updating_2G:
-            # If updating an interface and not explicitly setting its status,
-            # it is assumed that the interface is to be ENABLED and updated
-            if 'status' not in settings_to_update['2G']:
-                settings_to_update['2G']['status'] = 1
-                settings_to_update['5G_1'] = {'status': 0}
-        elif updating_5G_1:
-            if 'status' not in settings_to_update['5G_1']:
-                settings_to_update['2G'] = {'status': 0}
-                settings_to_update['5G_1']['status'] = 1
-        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
-            self.ap_settings, settings_to_update)
-        if updates_requested:
-            self.configure_ap()
-
-    def configure_ap(self):
-        """Function to configure Google Wifi."""
-        self.log.info('Stopping Google Wifi interfaces.')
-        print(self.ap_settings)
-        self.access_point.stop_all_aps()
-
-        if self.ap_settings['2G']['status'] == 1:
-            interface = '2G'
-            self.log.info('Bringing up 2.4 GHz interface.')
-        elif self.ap_settings['5G_1']['status'] == 1:
-            interface = '5G_1'
-            self.log.info('Bringing up 5 GHz interface.')
-        else:
-            return
-
-        bss_settings = []
-        ssid = self.ap_settings[interface]['ssid']
-        security_mode = self.ap_settings[interface]['security_type'].lower()
-        if 'wpa' in security_mode:
-            password = self.ap_settings[interface]['password']
-            security = hostapd_security.Security(security_mode=security_mode,
-                                                 password=password)
-        else:
-            security = hostapd_security.Security(security_mode=None,
-                                                 password=None)
-        channel = int(self.ap_settings[interface]['channel'])
-        bandwidth = self.BW_MODE_MAP[self.ap_settings[interface]['bandwidth']]
-        config = hostapd_ap_preset.create_ap_preset(
-            channel=channel,
-            ssid=ssid,
-            security=security,
-            bss_settings=bss_settings,
-            vht_bandwidth=bandwidth,
-            profile_name=self.ap_settings['hostapd_profile'],
-            iface_wlan_2g=self.access_point.wlan_2g,
-            iface_wlan_5g=self.access_point.wlan_5g)
-        config_bridge = self.access_point.generate_bridge_configs(channel)
-        brconfigs = bridge_interface.BridgeInterfaceConfigs(
-            config_bridge[0], 'lan0', config_bridge[2])
-        self.access_point.bridge.startup(brconfigs)
-        self.access_point.start_ap(config)
-        self.set_power(interface, self.ap_settings[interface]['power'])
-        self.set_rate(interface,
-                      mode=self.ap_settings[interface]['mode'],
-                      num_streams=self.ap_settings[interface]['num_streams'],
-                      rate=self.ap_settings[interface]['rate'],
-                      short_gi=self.ap_settings[interface]['short_gi'])
-        self.log.info('AP started on channel {} with SSID {}'.format(
-            channel, ssid))
-
-    def set_power(self, interface, power):
-        """Function that sets interface transmit power.
-
-        Args:
-            interface: string containing interface identifier (2G, 5G_1)
-            power: power level in dBm
-        """
-        if power == 'auto':
-            power_string = 'auto'
-        else:
-            if not float(power).is_integer():
-                self.log.info(
-                    'Power in dBm must be an integer. Setting to {}'.format(
-                        int(power)))
-            power = int(power)
-            power_string = 'fixed {}'.format(int(power) * 100)
-
-        if '2G' in interface:
-            interface_long = self.access_point.wlan_2g
-            self.ap_settings[interface]['power'] = power
-        elif '5G_1' in interface:
-            interface_long = self.access_point.wlan_5g
-            self.ap_settings[interface]['power'] = power
-        self.access_point.ssh.run('iw dev {} set txpower {}'.format(
-            interface_long, power_string))
-
-    def set_rate(self,
-                 interface,
-                 mode=None,
-                 num_streams=None,
-                 rate='auto',
-                 short_gi=0):
-        """Function that sets rate.
-
-        Args:
-            interface: string containing interface identifier (2G, 5G_1)
-            mode: string indicating the WiFi standard to use
-            num_streams: number of MIMO streams. used only for VHT
-            rate: data rate of MCS index to use
-            short_gi: boolean controlling the use of short guard interval
-        """
-        if '2G' in interface:
-            interface_long = self.access_point.wlan_2g
-            interface_short = '2.4'
-        elif '5G_1' in interface:
-            interface_long = self.access_point.wlan_5g
-            interface_short = '5'
-        self.ap_settings[interface]['mode'] = mode
-        self.ap_settings[interface]['num_streams'] = num_streams
-        self.ap_settings[interface]['rate'] = rate
-        self.ap_settings[interface]['short_gi'] = short_gi
-
-        if rate == 'auto':
-            cmd_string = 'iw dev {0} set bitrates'.format(interface_long)
-        elif 'legacy' in mode.lower():
-            cmd_string = 'iw dev {0} set bitrates legacy-{1} {2} ht-mcs-{1} vht-mcs-{1}'.format(
-                interface_long, interface_short, rate)
-        elif 'vht' in mode.lower():
-            cmd_string = 'iw dev {0} set bitrates legacy-{1} ht-mcs-{1} vht-mcs-{1} {2}:{3}'.format(
-                interface_long, interface_short, num_streams, rate)
-            if short_gi:
-                cmd_string = cmd_string + ' sgi-{}'.format(interface_short)
-        elif 'ht' in mode.lower():
-            cmd_string = 'iw dev {0} set bitrates legacy-{1} ht-mcs-{1} {2} vht-mcs-{1}'.format(
-                interface_long, interface_short, rate)
-            if short_gi:
-                cmd_string = cmd_string + ' sgi-{}'.format(interface_short)
-        self.access_point.ssh.run(cmd_string)
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7000.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
deleted file mode 100644
index 3f73aa6..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
+++ /dev/null
@@ -1,282 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearR7000AP(WifiRetailAP):
-    """Class that implements Netgear R7000 AP."""
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_dual_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.region_map = {
-            '1': 'Africa',
-            '2': 'Asia',
-            '3': 'Australia',
-            '4': 'Canada',
-            '5': 'Europe',
-            '6': 'Israel',
-            '7': 'Japan',
-            '8': 'Korea',
-            '9': 'Mexico',
-            '10': 'South America',
-            '11': 'United States',
-            '12': 'Middle East(Algeria/Syria/Yemen)',
-            '14': 'Russia',
-            '16': 'China',
-            '17': 'India',
-            '18': 'Malaysia',
-            '19': 'Middle East(Iran/Labanon/Qatar)',
-            '20': 'Middle East(Turkey/Egypt/Tunisia/Kuwait)',
-            '21': 'Middle East(Saudi Arabia)',
-            '22': 'Middle East(United Arab Emirates)',
-            '23': 'Singapore',
-            '24': 'Taiwan'
-        }
-        self.config_page_fields = {
-            'region': 'WRegion',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('2G', 'power'): 'enable_tpc',
-            ('5G_1', 'power'): 'enable_tpc_an',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an'
-        }
-        self.bw_mode_values = {
-            'g and b': '11g',
-            '145Mbps': 'VHT20',
-            '300Mbps': 'VHT40',
-            'HT80': 'VHT80'
-        }
-        self.power_mode_values = {
-            '1': '100%',
-            '2': '75%',
-            '3': '50%',
-            '4': '25%'
-        }
-        self.bw_mode_text = {
-            '11g': 'Up to 54 Mbps',
-            'VHT20': 'Up to 289 Mbps',
-            'VHT40': 'Up to 600 Mbps',
-            'VHT80': 'Up to 1300 Mbps'
-        }
-
-    def read_ap_settings(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    browser.visit_persistent(self.config_page_advanced,
-                                             BROWSER_WAIT_MED, 10)
-                    config_item = browser.find_by_name(value)
-                    self.ap_settings[key[0]][key[1]] = int(
-                        config_item.first.checked)
-                    browser.visit_persistent(self.config_page,
-                                             BROWSER_WAIT_MED, 10)
-                else:
-                    config_item = browser.find_by_name(value)
-                    if 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            config_item.first.value]
-                    elif 'power' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.power_mode_values[
-                                config_item.first.value]
-                    elif 'region' in key:
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.first.value]
-                    elif 'security_type' in key:
-                        for item in config_item:
-                            if item.checked:
-                                self.ap_settings[key[0]][key[1]] = item.value
-                    else:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][
-                            key[1]] = config_item.first.value
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            # Update region, and power/bandwidth for each network
-            config_item = browser.find_by_name(
-                self.config_page_fields['region']).first
-            config_item.select_by_text(self.ap_settings['region'])
-            for key, value in self.config_page_fields.items():
-                if 'power' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.select_by_text(
-                        self.ap_settings[key[0]][key[1]])
-                elif 'bandwidth' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select_by_text(self.bw_mode_text[
-                            self.ap_settings[key[0]][key[1]]])
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select bandwidth. Keeping AP default.')
-
-            # Update security settings (passwords updated only if applicable)
-            for key, value in self.config_page_fields.items():
-                if 'security_type' in key:
-                    browser.choose(value, self.ap_settings[key[0]][key[1]])
-                    if self.ap_settings[key[0]][key[1]] == 'WPA2-PSK':
-                        config_item = browser.find_by_name(
-                            self.config_page_fields[(key[0],
-                                                     'password')]).first
-                        config_item.fill(self.ap_settings[key[0]]['password'])
-
-            # Update SSID and channel for each network
-            # NOTE: Update ordering done as such as workaround for R8000
-            # wherein channel and SSID get overwritten when some other
-            # variables are changed. However, region does have to be set before
-            # channel in all cases.
-            for key, value in self.config_page_fields.items():
-                if 'ssid' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.fill(self.ap_settings[key[0]][key[1]])
-                elif 'channel' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select(self.ap_settings[key[0]][key[1]])
-                        time.sleep(BROWSER_WAIT_SHORT)
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select channel. Keeping AP default.')
-                    try:
-                        alert = browser.get_alert()
-                        alert.accept()
-                    except:
-                        pass
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            try:
-                alert = browser.get_alert()
-                alert.accept()
-                time.sleep(BROWSER_WAIT_SHORT)
-            except:
-                time.sleep(BROWSER_WAIT_SHORT)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED, 10)
-
-            # Turn radios on or off
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings[key[0]][key[1]]:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-
-class NetgearR7000NAAP(NetgearR7000AP):
-    """Class that implements Netgear R7000 NA AP."""
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        super().init_gui_data()
-        self.region_map['11'] = 'North America'
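Across these drivers, config_page_fields maps either a plain key ('region') or an (interface, setting) tuple to the name attribute of the matching HTML form element, and read_ap_settings/configure_ap simply loop over that mapping. A rough sketch of the pattern, with a hypothetical read_fields helper and a browser object exposing the same find_by_name/.first/.value interface used above (status checkboxes and per-field decoding are glossed over for brevity):

    # Hypothetical helper illustrating the tuple-keyed field-map pattern.
    def read_fields(browser, config_page_fields):
        settings = {'2G': {}, '5G_1': {}}
        for key, element_name in config_page_fields.items():
            element = browser.find_by_name(element_name).first
            if isinstance(key, tuple):
                interface, setting = key
                settings[interface][setting] = element.value
            else:
                settings[key] = element.value  # e.g. 'region'
        return settings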
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7500.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7500.py
deleted file mode 100644
index 4c2729e..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7500.py
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import selenium
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearR7500AP(WifiRetailAP):
-    """Class that implements Netgear R7500 AP."""
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        self.config_page = ('{protocol}://{username}:{password}@'
-                            '{ip_address}:{port}/index.htm').format(
-                                protocol=self.ap_settings['protocol'],
-                                username=self.ap_settings['admin_username'],
-                                password=self.ap_settings['admin_password'],
-                                ip_address=self.ap_settings['ip_address'],
-                                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/adv_index.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.config_page_fields = {
-            'region': 'WRegion',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an'
-        }
-        self.region_map = {
-            '0': 'Africa',
-            '1': 'Asia',
-            '2': 'Australia',
-            '3': 'Canada',
-            '4': 'Europe',
-            '5': 'Israel',
-            '6': 'Japan',
-            '7': 'Korea',
-            '8': 'Mexico',
-            '9': 'South America',
-            '10': 'United States',
-            '11': 'China',
-            '12': 'India',
-            '13': 'Malaysia',
-            '14': 'Middle East(Algeria/Syria/Yemen)',
-            '15': 'Middle East(Iran/Labanon/Qatar)',
-            '16': 'Middle East(Turkey/Egypt/Tunisia/Kuwait)',
-            '17': 'Middle East(Saudi Arabia)',
-            '18': 'Middle East(United Arab Emirates)',
-            '19': 'Russia',
-            '20': 'Singapore',
-            '21': 'Taiwan'
-        }
-        self.bw_mode_text = {
-            '2G': {
-                '11g': 'Up to 54 Mbps',
-                'VHT20': 'Up to 289 Mbps',
-                'VHT40': 'Up to 600 Mbps'
-            },
-            '5G_1': {
-                'VHT20': 'Up to 347 Mbps',
-                'VHT40': 'Up to 800 Mbps',
-                'VHT80': 'Up to 1733 Mbps'
-            }
-        }
-        self.bw_mode_values = {
-            '1': '11g',
-            '2': 'VHT20',
-            '3': 'VHT40',
-            '7': 'VHT20',
-            '8': 'VHT40',
-            '9': 'VHT80'
-        }
-        self.security_mode_values = {
-            '2G': {
-                'Disable': 'security_disable',
-                'WPA2-PSK': 'security_wpa2'
-            },
-            '5G_1': {
-                'Disable': 'security_an_disable',
-                'WPA2-PSK': 'security_an_wpa2'
-            }
-        }
-
-    def read_ap_settings(self):
-        """Function to read ap wireless settings."""
-        # Get radio status (on/off)
-        self.read_radio_on_off()
-        # Get radio configuration. Note that if both radios are off, the below
-        # code will result in an error
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                for key, value in self.config_page_fields.items():
-                    if 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            config_item.value]
-                    elif 'region' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.value]
-                    elif 'password' in key:
-                        try:
-                            config_item = iframe.find_by_name(value).first
-                            self.ap_settings[key[0]][
-                                key[1]] = config_item.value
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'WPA2-PSK'
-                        except:
-                            self.ap_settings[key[0]][
-                                key[1]] = 'defaultpassword'
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'Disable'
-                    elif ('channel' in key) or ('ssid' in key):
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-                    else:
-                        pass
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Update AP region. Must be done before channel setting
-                config_item = iframe.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
-                # Update wireless settings for each network
-                for key, value in self.config_page_fields.items():
-                    if 'ssid' in key:
-                        config_item = iframe.find_by_name(value).first
-                        config_item.fill(self.ap_settings[key[0]][key[1]])
-                    elif 'channel' in key:
-                        channel = self.ap_settings[key[0]][key[1]]
-                        if int(channel) < 10:
-                            channel_string = '0' + str(channel)
-                        elif int(channel) > 48 and int(channel) < 149:
-                            channel_string = str(channel) + 'DFS'
-                        else:
-                            channel_string = str(channel)
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(channel_string)
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select channel. Keeping AP default.')
-                    elif 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(
-                                str(self.bw_mode_text[key[0]][self.ap_settings[
-                                    key[0]][key[1]]]))
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select bandwidth. Keeping AP default.')
-                # Update passwords for WPA2-PSK protected networks
-                # (Must be done after security type is selected)
-                for key, value in self.config_page_fields.items():
-                    if 'security_type' in key:
-                        security_option = browser.driver.find_element_by_id(
-                            self.security_mode_values[key[0]][self.ap_settings[
-                                key[0]][key[1]]])
-                        action = selenium.webdriver.common.action_chains.ActionChains(
-                            browser.driver)
-                        action.move_to_element(
-                            security_option).click().perform()
-                        if self.ap_settings[key[0]][key[1]] == 'WPA2-PSK':
-                            config_item = iframe.find_by_name(
-                                self.config_page_fields[(key[0],
-                                                         'password')]).first
-                            config_item.fill(
-                                self.ap_settings[key[0]]['password'])
-
-                apply_button = iframe.find_by_name('Apply')
-                apply_button[0].click()
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='advanced_bt')
-            advanced_button = browser.find_by_id('advanced_bt').first
-            advanced_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-            wireless_button = browser.find_by_id('wladv').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Turn radios on or off
-                for key, value in self.config_page_fields.items():
-                    if 'status' in key:
-                        config_item = iframe.find_by_name(value).first
-                        if self.ap_settings[key[0]][key[1]]:
-                            config_item.check()
-                        else:
-                            config_item.uncheck()
-
-                time.sleep(BROWSER_WAIT_SHORT)
-                browser.find_by_name('Apply').first.click()
-                time.sleep(BROWSER_WAIT_EXTRA_LONG)
-                browser.visit_persistent(self.config_page,
-                                         BROWSER_WAIT_EXTRA_LONG, 10)
-
-    def read_radio_on_off(self):
-        """Helper configuration function to read radio status."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='advanced_bt')
-            advanced_button = browser.find_by_id('advanced_bt').first
-            advanced_button.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            wireless_button = browser.find_by_id('wladv').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Turn radios on or off
-                for key, value in self.config_page_fields.items():
-                    if 'status' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.checked)
-
-
-class NetgearR7500NAAP(NetgearR7500AP):
-    """Class that implements Netgear R7500 NA AP."""
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        super().init_gui_data()
-        self.region_map['10'] = 'North America'
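One detail worth calling out from configure_ap above: the R7500 GUI labels single-digit channels with a leading zero and channels strictly between 48 and 149 with a DFS suffix, so the driver must build the exact option text before calling select_by_text. The RAX120 driver below reuses the same rule. Restated as a standalone helper (hypothetical name, same logic):

    def channel_option_text(channel: int) -> str:
        # Mirrors the channel-string logic in NetgearR7500AP.configure_ap.
        if channel < 10:
            return '0' + str(channel)
        if 48 < channel < 149:
            return str(channel) + 'DFS'
        return str(channel)

    assert channel_option_text(6) == '06'
    assert channel_option_text(100) == '100DFS'
    assert channel_option_text(149) == '149'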
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7800.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7800.py
deleted file mode 100644
index 7528b9c..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7800.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi.wifi_retail_ap import NetgearR7500AP
-
-
-class NetgearR7800AP(NetgearR7500AP):
-    """Class that implements Netgear R7800 AP.
-
-    Since most of the class' implementation is shared with the R7500, this
-    class inherits from NetgearR7500AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        super().init_gui_data()
-        # Overwrite minor differences from R7500 AP
-        self.bw_mode_text['2G']['VHT20'] = 'Up to 347 Mbps'
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r8000.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r8000.py
deleted file mode 100644
index 81b96b8..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r8000.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi.wifi_retail_ap import NetgearR7000AP
-
-
-class NetgearR8000AP(NetgearR7000AP):
-    """Class that implements Netgear R8000 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        super().init_gui_data()
-        # Overwrite minor differences from R7000 AP
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_dual_band_r8000.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_dual_band_r8000.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_dual_band2_r8000.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '5G_2'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [36, 40, 44, 48],
-                '5G_2': [149, 153, 157, 161, 165]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80'],
-                '5G_2': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.config_page_fields = {
-            'region': 'WRegion',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('5G_2', 'status'): 'enable_ap_an_2',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('5G_2', 'ssid'): 'ssid_an_2',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('5G_2', 'channel'): 'w_channel_an_2',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('5G_2', 'bandwidth'): 'opmode_an_2',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('5G_2', 'security_type'): 'security_type_an_2',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an',
-            ('5G_2', 'password'): 'passphrase_an_2'
-        }
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax120.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
deleted file mode 100644
index 36ed18d..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
+++ /dev/null
@@ -1,408 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import selenium
-import time
-from antlion.test_utils.wifi.wifi_retail_ap.netgear_r7500 import NetgearR7500AP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearRAX120AP(NetgearR7500AP):
-    """Class that implements Netgear RAX120 AP.
-
-    Since most of the class' implementation is shared with the R7500, this
-    class inherits from NetgearR7500AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        self.config_page = ('{protocol}://{username}:{password}@'
-                            '{ip_address}:{port}/index.htm').format(
-                                protocol=self.ap_settings['protocol'],
-                                username=self.ap_settings['admin_username'],
-                                password=self.ap_settings['admin_password'],
-                                ip_address=self.ap_settings['ip_address'],
-                                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/adv_index.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
-                '5G_1': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.config_page_fields = {
-            'region': 'WRegion',
-            'enable_ax': 'enable_ax_chec',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an'
-        }
-        self.region_map = {
-            '0': 'Africa',
-            '1': 'Asia',
-            '2': 'Australia',
-            '3': 'Canada',
-            '4': 'Europe',
-            '5': 'Israel',
-            '6': 'Japan',
-            '7': 'Korea',
-            '8': 'Mexico',
-            '9': 'South America',
-            '10': 'United States',
-            '11': 'China',
-            '12': 'India',
-            '13': 'Malaysia',
-            '14': 'Middle East(Algeria/Syria/Yemen)',
-            '15': 'Middle East(Iran/Labanon/Qatar)',
-            '16': 'Middle East(Egypt/Tunisia/Kuwait)',
-            '17': 'Middle East(Turkey)',
-            '18': 'Middle East(Saudi Arabia/United Arab Emirates)',
-            '19': 'Russia',
-            '20': 'Singapore',
-            '21': 'Taiwan',
-            'Australia': 'Australia',
-            'Europe': 'Europe',
-            'Korea': 'Korea',
-            'Singapore': 'Singapore',
-            'Hong Kong': 'Hong Kong',
-            'United States': 'United States',
-        }
-        self.bw_mode_text = {
-            '2G': {
-                '11g': 'Up to 54 Mbps (11g)',
-                'HE20': 'Up to 573.5 Mbps (11ax, HT20, 1024-QAM)',
-                'HE40': 'Up to 1147 Mbps (11ax, HT40, 1024-QAM)',
-                'VHT20': 'Up to 481 Mbps (11ng, HT20, 1024-QAM)',
-                'VHT40': 'Up to 1000 Mbps (11ng, HT40, 1024-QAM)'
-            },
-            '5G_1': {
-                'HE20': 'Up to 1147 Mbps (11ax, HT20, 1024-QAM)',
-                'HE40': 'Up to 2294 Mbps (11ax, HT40, 1024-QAM)',
-                'HE80': 'Up to 4803 Mbps (80MHz) (11ax, HT80, 1024-QAM)',
-                'HE160': 'Up to 4803 Mbps (160MHz) (11ax, HT160, 1024-QAM)',
-                'VHT20': 'Up to 962 Mbps (11ac, HT20, 1024-QAM)',
-                'VHT40': 'Up to 2000 Mbps (11ac, HT40, 1024-QAM)',
-                'VHT80': 'Up to 4333 Mbps (80MHz) (11ac, HT80, 1024-QAM)',
-                'VHT160': 'Up to 4333 Mbps (160MHz) (11ac, HT160, 1024-QAM)'
-            }
-        }
-        self.bw_mode_values = {
-            # first key is a boolean indicating if 11ax is enabled
-            0: {
-                '1': '11g',
-                '2': 'VHT20',
-                '3': 'VHT40',
-                '7': 'VHT20',
-                '8': 'VHT40',
-                '9': 'VHT80',
-                '10': 'VHT160'
-            },
-            1: {
-                '1': '11g',
-                '2': 'HE20',
-                '3': 'HE40',
-                '7': 'HE20',
-                '8': 'HE40',
-                '9': 'HE80',
-                '10': 'HE160',
-                '54': '11g',
-                '573.5': 'HE20',
-                '1146': 'HE40',
-                '1147': 'HE20',
-                '2294': 'HE40',
-                '4803-HT80': 'HE80',
-                '4803-HT160': 'HE160'
-            }
-        }
-        self.security_mode_values = {
-            '2G': {
-                'Disable': 'security_disable',
-                'WPA2-PSK': 'security_wpa2'
-            },
-            '5G_1': {
-                'Disable': 'security_an_disable',
-                'WPA2-PSK': 'security_an_wpa2'
-            }
-        }
-
-    def _set_channel_and_bandwidth(self,
-                                   network,
-                                   channel=None,
-                                   bandwidth=None):
-        """Helper function that sets network bandwidth and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = {network: {}}
-        if channel:
-            if channel not in self.capabilities['channels'][network]:
-                self.log.error('Ch{} is not supported on {} interface.'.format(
-                    channel, network))
-            setting_to_update[network]['channel'] = channel
-
-        if bandwidth is None:
-            return setting_to_update
-
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update[network]['bandwidth'] = str(bandwidth)
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        for other_network in self.capabilities['interfaces']:
-            if other_network == network:
-                continue
-            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-                'bandwidth'] else 'VHT'
-            other_bw = ''.join([
-                x for x in self.ap_settings[other_network]['bandwidth']
-                if x.isdigit()
-            ])
-            if other_mode != requested_mode:
-                updated_mode = '{}{}'.format(requested_mode, other_bw)
-                self.log.warning('All networks must be VHT or HE. '
-                                 'Updating {} to {}'.format(
-                                     other_network, updated_mode))
-                setting_to_update.setdefault(other_network, {})
-                setting_to_update[other_network]['bandwidth'] = updated_mode
-        return setting_to_update
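# Worked example of the helper above, assuming the 2G interface currently has
# ap_settings['2G']['bandwidth'] == 'VHT20' (starting state assumed purely for
# illustration):
#
#   update = ap._set_channel_and_bandwidth('5G_1', channel=149, bandwidth='HE80')
#   update == {
#       '5G_1': {'channel': 149, 'bandwidth': 'HE80'},
#       'enable_ax': 1,
#       '2G': {'bandwidth': 'HE20'},
#   }
#
# enable_ax is set because an HE mode was requested, and 2G is pulled along to
# HE20 so that all radios stay in the same VHT/HE family, per the warning above.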
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        setting_to_update = self._set_channel_and_bandwidth(network,
-                                                            channel=channel)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, channel=channel, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def read_ap_settings(self):
-        """Function to read ap wireless settings."""
-        # Get radio status (on/off)
-        self.read_radio_on_off()
-        # Get radio configuration. Note that if both radios are off, the below
-        # code will result in an error
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # read if 11ax is enabled first
-                config_item = iframe.find_by_name('enable_ax').first
-                self.ap_settings['enable_ax'] = int(config_item.checked)
-                # read rest of configuration
-                for key, value in self.config_page_fields.items():
-                    if 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            self.ap_settings['enable_ax']][config_item.value]
-                    elif 'region' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.value]
-                    elif 'password' in key:
-                        try:
-                            config_item = iframe.find_by_name(value).first
-                            self.ap_settings[key[0]][
-                                key[1]] = config_item.value
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'WPA2-PSK'
-                        except:
-                            self.ap_settings[key[0]][
-                                key[1]] = 'defaultpassword'
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'Disable'
-                    elif ('ssid' in key):
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-                    elif ('channel' in key):
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.value)
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Create action chain
-                action = selenium.webdriver.common.action_chains.ActionChains(
-                    browser.driver)
-                # Configure 11ax on or off
-                curr_ax_enabled = int(
-                    iframe.find_by_name('enable_ax').first.checked)
-                if self.ap_settings['enable_ax'] != curr_ax_enabled:
-                    ax_checkbox = browser.driver.find_element_by_id(
-                        'enable_ax_chec')
-                    action.move_to_element(ax_checkbox).click().perform()
-                # Update AP region. Must be done before channel setting
-                try:
-                    config_item = iframe.find_by_name(
-                        self.config_page_fields['region']).first
-                    config_item.select_by_text(self.ap_settings['region'])
-                except:
-                    self.log.warning('Could not set AP region to {}.'.format(
-                        self.ap_settings['region']))
-                # Update wireless settings for each network
-                for key, value in self.config_page_fields.items():
-                    if 'ssid' in key:
-                        config_item = iframe.find_by_name(value).first
-                        config_item.fill(self.ap_settings[key[0]][key[1]])
-                    elif 'channel' in key:
-                        channel = self.ap_settings[key[0]][key[1]]
-                        if int(channel) < 10:
-                            channel_string = '0' + str(channel)
-                        elif int(channel) > 48 and int(channel) < 149:
-                            channel_string = str(channel) + 'DFS'
-                        else:
-                            channel_string = str(channel)
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(channel_string)
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select channel. Keeping AP default.')
-                    elif 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(
-                                str(self.bw_mode_text[key[0]][self.ap_settings[
-                                    key[0]][key[1]]]))
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select bandwidth. Keeping AP default.')
-                # Update passwords for WPA2-PSK protected networks
-                # (Must be done after security type is selected)
-                for key, value in self.config_page_fields.items():
-                    if 'security_type' in key:
-                        security_option = browser.driver.find_element_by_id(
-                            self.security_mode_values[key[0]][self.ap_settings[
-                                key[0]][key[1]]])
-                        action = selenium.webdriver.common.action_chains.ActionChains(
-                            browser.driver)
-                        action.move_to_element(
-                            security_option).click().perform()
-                        if self.ap_settings[key[0]][key[1]] == 'WPA2-PSK':
-                            config_item = iframe.find_by_name(
-                                self.config_page_fields[(key[0],
-                                                         'password')]).first
-                            config_item.fill(
-                                self.ap_settings[key[0]]['password'])
-
-                apply_button = iframe.find_by_name('Apply')
-                apply_button[0].click()
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
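On the RAX120, the same opmode value decodes to either a VHT or an HE mode depending on whether the 11ax checkbox is ticked, which is why read_ap_settings above reads enable_ax before anything else. A small sketch of that two-level lookup, assuming an abridged copy of the bw_mode_values table from init_gui_data (decode_opmode is a hypothetical name):

    bw_mode_values = {
        0: {'2': 'VHT20', '3': 'VHT40', '9': 'VHT80', '10': 'VHT160'},
        1: {'2': 'HE20', '3': 'HE40', '9': 'HE80', '10': 'HE160'},
    }

    def decode_opmode(opmode_value: str, ax_enabled: int) -> str:
        # ax_enabled mirrors ap_settings['enable_ax'] (0 or 1).
        return bw_mode_values[ax_enabled][opmode_value]

    assert decode_opmode('9', 0) == 'VHT80'
    assert decode_opmode('9', 1) == 'HE80'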
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax200.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
deleted file mode 100644
index 0034bf3..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
+++ /dev/null
@@ -1,415 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearRAX200AP(WifiRetailAP):
-    """Class that implements Netgear RAX200 AP.
-
-    The GUI workflow is similar to the R7000's, but this class is built
-    directly on WifiRetailAP and defines its tri-band config parameters itself.
-    """
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_tri_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '5G_2'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [36, 40, 44, 48, 52, 56, 60, 64],
-                '5G_2': [
-                    100, 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144,
-                    149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
-                '5G_1': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ],
-                '5G_2': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.region_map = {
-            '3': 'Australia',
-            '4': 'Canada',
-            '5': 'Europe',
-            '7': 'Japan',
-            '8': 'Korea',
-            '11': 'North America',
-            '16': 'China',
-            '17': 'India',
-            '21': 'Middle East(Saudi Arabia/United Arab Emirates)',
-            '23': 'Singapore',
-            '25': 'Hong Kong',
-            '26': 'Vietnam'
-        }
-
-        self.bw_mode_text = {
-            '2G': {
-                '11g': 'Up to 54 Mbps',
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps'
-            },
-            '5G_1': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps',
-                'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330 Mbps'
-            },
-            '5G_2': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps',
-                'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330 Mbps'
-            }
-        }
-        self.bw_mode_values = {
-            # first key is a boolean indicating if 11ax is enabled
-            0: {
-                'g and b': '11g',
-                '145Mbps': 'VHT20',
-                '300Mbps': 'VHT40',
-                'HT80': 'VHT80',
-                'HT160': 'VHT160'
-            },
-            1: {
-                'g and b': '11g',
-                '145Mbps': 'HE20',
-                '300Mbps': 'HE40',
-                'HT80': 'HE80',
-                'HT160': 'HE160'
-            }
-        }
-
-        # Config ordering intentional to avoid GUI bugs
-        self.config_page_fields = collections.OrderedDict([
-            ('region', 'WRegion'), ('enable_ax', 'enable_he'),
-            (('2G', 'status'), 'enable_ap'),
-            (('5G_1', 'status'), 'enable_ap_an'),
-            (('5G_2', 'status'), 'enable_ap_an_2'), (('2G', 'ssid'), 'ssid'),
-            (('5G_1', 'ssid'), 'ssid_an'), (('5G_2', 'ssid'), 'ssid_an_2'),
-            (('2G', 'channel'), 'w_channel'),
-            (('5G_1', 'channel'), 'w_channel_an'),
-            (('5G_2', 'channel'), 'w_channel_an_2'),
-            (('2G', 'bandwidth'), 'opmode'),
-            (('5G_1', 'bandwidth'), 'opmode_an'),
-            (('5G_2', 'bandwidth'), 'opmode_an_2'),
-            (('2G', 'power'), 'enable_tpc'),
-            (('5G_1', 'power'), 'enable_tpc_an'),
-            (('5G_2', 'power'), 'enable_tpc_an_2'),
-            (('5G_2', 'security_type'), 'security_type_an_2'),
-            (('5G_1', 'security_type'), 'security_type_an'),
-            (('2G', 'security_type'), 'security_type'),
-            (('2G', 'password'), 'passphrase'),
-            (('5G_1', 'password'), 'passphrase_an'),
-            (('5G_2', 'password'), 'passphrase_an_2')
-        ])
-
-        self.power_mode_values = {
-            '1': '100%',
-            '2': '75%',
-            '3': '50%',
-            '4': '25%'
-        }
-
-    def _set_channel_and_bandwidth(self,
-                                   network,
-                                   channel=None,
-                                   bandwidth=None):
-        """Helper function that sets network bandwidth and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = {network: {}}
-        if channel:
-            if channel not in self.capabilities['channels'][network]:
-                self.log.error('Ch{} is not supported on {} interface.'.format(
-                    channel, network))
-            setting_to_update[network]['channel'] = channel
-
-        if bandwidth is None:
-            return setting_to_update
-
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update[network]['bandwidth'] = str(bandwidth)
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        for other_network in self.capabilities['interfaces']:
-            if other_network == network:
-                continue
-            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-                'bandwidth'] else 'VHT'
-            other_bw = ''.join([
-                x for x in self.ap_settings[other_network]['bandwidth']
-                if x.isdigit()
-            ])
-            if other_mode != requested_mode:
-                updated_mode = '{}{}'.format(requested_mode, other_bw)
-                self.log.warning('All networks must be VHT or HE. '
-                                 'Updating {} to {}'.format(
-                                     other_network, updated_mode))
-                setting_to_update.setdefault(other_network, {})
-                setting_to_update[other_network]['bandwidth'] = updated_mode
-        return setting_to_update
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        setting_to_update = self._set_channel_and_bandwidth(network,
-                                                            channel=channel)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, channel=channel, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def read_ap_settings(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    browser.visit_persistent(self.config_page_advanced,
-                                             BROWSER_WAIT_MED, 10)
-                    config_item = browser.find_by_name(value)
-                    self.ap_settings[key[0]][key[1]] = int(
-                        config_item.first.checked)
-                    browser.visit_persistent(self.config_page,
-                                             BROWSER_WAIT_MED, 10)
-                else:
-                    config_item = browser.find_by_name(value)
-                    if 'enable_ax' in key:
-                        self.ap_settings[key] = int(config_item.first.checked)
-                    elif 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            self.ap_settings['enable_ax']][
-                                config_item.first.value]
-                    elif 'power' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.power_mode_values[
-                                config_item.first.value]
-                    elif 'region' in key:
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.first.value]
-                    elif 'security_type' in key:
-                        for item in config_item:
-                            if item.checked:
-                                self.ap_settings[key[0]][key[1]] = item.value
-                    elif 'channel' in key:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.first.value)
-                    else:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][
-                            key[1]] = config_item.first.value
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            # Update region, and power/bandwidth for each network
-            try:
-                config_item = browser.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
-            except:
-                self.log.warning('Cannot change region.')
-            for key, value in self.config_page_fields.items():
-                if 'enable_ax' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings['enable_ax']:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-                if 'power' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.select_by_text(
-                        self.ap_settings[key[0]][key[1]])
-                elif 'bandwidth' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select_by_text(self.bw_mode_text[key[0]][
-                            self.ap_settings[key[0]][key[1]]])
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select bandwidth. Keeping AP default.')
-
-            # Update security settings (passwords updated only if applicable)
-            for key, value in self.config_page_fields.items():
-                if 'security_type' in key:
-                    browser.choose(value, self.ap_settings[key[0]][key[1]])
-                    if 'WPA' in self.ap_settings[key[0]][key[1]]:
-                        config_item = browser.find_by_name(
-                            self.config_page_fields[(key[0],
-                                                     'password')]).first
-                        config_item.fill(self.ap_settings[key[0]]['password'])
-
-            for key, value in self.config_page_fields.items():
-                if 'ssid' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.fill(self.ap_settings[key[0]][key[1]])
-                elif 'channel' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select(self.ap_settings[key[0]][key[1]])
-                        time.sleep(BROWSER_WAIT_SHORT)
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select channel. Keeping AP default.')
-                    try:
-                        for idx in range(0, 2):
-                            alert = browser.get_alert()
-                            alert.accept()
-                            time.sleep(BROWSER_WAIT_SHORT)
-                    except:
-                        pass
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            try:
-                alert = browser.get_alert()
-                alert.accept()
-                time.sleep(BROWSER_WAIT_SHORT)
-            except:
-                time.sleep(BROWSER_WAIT_SHORT)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED, 10)
-
-            # Turn radios on or off
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings[key[0]][key[1]]:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
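
The VHT/HE consistency rule enforced by the deleted _set_channel_and_bandwidth() helper above is easier to follow in isolation. The sketch below is illustrative only — propagate_mode and the snapshot dict of current per-interface modes are assumptions, not antlion API — but it reproduces the same propagation logic: the requested PHY mode wins, and every other interface keeps its width while switching to that mode.

def propagate_mode(network, bandwidth, current_bandwidths):
    """Reproduces the deleted propagation rule, for illustration only.

    Args:
        network: interface being reconfigured, e.g. '5G_1'
        bandwidth: requested mode, e.g. 'HE80' or 'VHT40'
        current_bandwidths: assumed snapshot of each interface's current mode
    """
    update = {network: {'bandwidth': bandwidth}, 'enable_ax': int('HE' in bandwidth)}
    requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
    for other, other_bw in current_bandwidths.items():
        if other == network:
            continue
        other_mode = 'HE' if 'HE' in other_bw else 'VHT'
        width = ''.join(c for c in other_bw if c.isdigit())
        if other_mode != requested_mode:
            # Keep the other interface's width; only the mode prefix changes.
            update[other] = {'bandwidth': requested_mode + width}
    return update

# Requesting HE80 on 5G_1 also rewrites 2G from VHT40 to HE40; 5G_2 already uses HE.
print(propagate_mode('5G_1', 'HE80', {'2G': 'VHT40', '5G_1': 'VHT80', '5G_2': 'HE160'}))
# {'5G_1': {'bandwidth': 'HE80'}, 'enable_ax': 1, '2G': {'bandwidth': 'HE40'}}
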
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax80.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax80.py
deleted file mode 100644
index 6c99a3b..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax80.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi.wifi_retail_ap import NetgearR7000AP
-
-
-class NetgearRAX80AP(NetgearR7000AP):
-    """Class that implements Netgear RAX80 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        super().init_gui_data()
-        # Overwrite minor differences from R7000 AP
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_dual_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '5G_2'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [36, 40, 44, 48],
-                '5G_2': [149, 153, 157, 161, 165]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80'],
-                '5G_2': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.bw_mode_values = {
-            'g and b': '11g',
-            '145Mbps': 'VHT20',
-            '300Mbps': 'VHT40',
-            'HT80': 'VHT80',
-            'HT160': 'VHT160'
-        }
-        self.bw_mode_text = {
-            '11g': 'Up to 54 Mbps',
-            'VHT20': 'Up to 600 Mbps',
-            'VHT40': 'Up to 1200 Mbps',
-            'VHT80': 'Up to 2400 Mbps',
-            'VHT160': 'Up to 4800 Mbps'
-        }
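
The two lookup tables above are used in opposite directions: bw_mode_values maps the raw value read from the bandwidth <select> element to a canonical mode string (the read_ap_settings() path), while bw_mode_text maps a canonical mode back to the visible dropdown label that configure_ap() selects (the write path). A minimal self-contained sketch, with helper names invented purely for illustration:

# Tables copied from NetgearRAX80AP.init_gui_data() above.
bw_mode_values = {'g and b': '11g', '145Mbps': 'VHT20', '300Mbps': 'VHT40',
                  'HT80': 'VHT80', 'HT160': 'VHT160'}
bw_mode_text = {'11g': 'Up to 54 Mbps', 'VHT20': 'Up to 600 Mbps',
                'VHT40': 'Up to 1200 Mbps', 'VHT80': 'Up to 2400 Mbps',
                'VHT160': 'Up to 4800 Mbps'}

def gui_value_to_mode(select_value):
    # Read path: raw form value -> canonical mode string.
    return bw_mode_values[select_value]

def mode_to_gui_text(mode):
    # Write path: canonical mode string -> dropdown label to select.
    return bw_mode_text[mode]

assert gui_value_to_mode('HT80') == 'VHT80'
assert mode_to_gui_text('VHT80') == 'Up to 2400 Mbps'
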
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py
deleted file mode 100644
index 73aeaec..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py
+++ /dev/null
@@ -1,442 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import numpy
-import re
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearRAXE500AP(WifiRetailAP):
-    """Class that implements Netgear RAXE500 AP.
-
-    The GUI interaction follows the same structure as NetgearR7000AP, but this
-    class inherits directly from WifiRetailAP and redefines the config
-    parameters for the tri-band RAXE500.
-    """
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_firmware()
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_tri_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.firmware_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/ADVANCED_home2_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '6G'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 144, 149, 153, 157, 161, 165
-                ],
-                '6G': ['6g' + str(ch) for ch in numpy.arange(37, 222, 16)]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
-                '5G_1': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ],
-                '6G': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.region_map = {
-            '3': 'Australia',
-            '4': 'Canada',
-            '5': 'Europe',
-            '7': 'Japan',
-            '8': 'Korea',
-            '11': 'North America',
-            '16': 'China',
-            '17': 'India',
-            '21': 'Middle East(Saudi Arabia/United Arab Emirates)',
-            '23': 'Singapore',
-            '25': 'Hong Kong',
-            '26': 'Vietnam'
-        }
-
-        self.bw_mode_text = {
-            '2G': {
-                'g and b': 'Up to 54 Mbps',
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps'
-            },
-            '5G_1': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps',
-                'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330 Mbps'
-            },
-            '6G': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 600 Mbps',
-                'VHT40': 'Up to 1200 Mbps',
-                'VHT80': 'Up to 2400 Mbps',
-                'VHT160': 'Up to 4800 Mbps'
-            }
-        }
-        self.bw_mode_values = {
-            # first key is a boolean indicating if 11ax is enabled
-            0: {
-                'g and b': '11g',
-                'HT20': 'VHT20',
-                'HT40': 'VHT40',
-                'HT80': 'VHT80',
-                'HT160': 'VHT160'
-            },
-            1: {
-                'g and b': '11g',
-                'HT20': 'HE20',
-                'HT40': 'HE40',
-                'HT80': 'HE80',
-                'HT160': 'HE160'
-            }
-        }
-
-        # Config ordering intentional to avoid GUI bugs
-        self.config_page_fields = collections.OrderedDict([
-            ('region', 'WRegion'), ('enable_ax', 'enable_he'),
-            (('2G', 'status'), 'enable_ap'),
-            (('5G_1', 'status'), 'enable_ap_an'),
-            (('6G', 'status'), 'enable_ap_an_2'), (('2G', 'ssid'), 'ssid'),
-            (('5G_1', 'ssid'), 'ssid_an'), (('6G', 'ssid'), 'ssid_an_2'),
-            (('2G', 'channel'), 'w_channel'),
-            (('5G_1', 'channel'), 'w_channel_an'),
-            (('6G', 'channel'), 'w_channel_an_2'),
-            (('2G', 'bandwidth'), 'opmode'),
-            (('5G_1', 'bandwidth'), 'opmode_an'),
-            (('6G', 'bandwidth'), 'opmode_an_2'),
-            (('2G', 'power'), 'enable_tpc'),
-            (('5G_1', 'power'), 'enable_tpc_an'),
-            (('6G', 'security_type'), 'security_type_an_2'),
-            (('5G_1', 'security_type'), 'security_type_an'),
-            (('2G', 'security_type'), 'security_type'),
-            (('2G', 'password'), 'passphrase'),
-            (('5G_1', 'password'), 'passphrase_an'),
-            (('6G', 'password'), 'passphrase_an_2')
-        ])
-
-        self.power_mode_values = {
-            '1': '100%',
-            '2': '75%',
-            '3': '50%',
-            '4': '25%'
-        }
-
-    def _set_channel_and_bandwidth(self,
-                                   network,
-                                   channel=None,
-                                   bandwidth=None):
-        """Helper function that sets network bandwidth and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 6G)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-
-        setting_to_update = {network: {}}
-        if channel:
-            if channel not in self.capabilities['channels'][network]:
-                self.log.error('Ch{} is not supported on {} interface.'.format(
-                    channel, network))
-            if isinstance(channel, str) and '6g' in channel:
-                channel = int(channel[2:])
-            setting_to_update[network]['channel'] = channel
-
-        if bandwidth is None:
-            return setting_to_update
-
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update[network]['bandwidth'] = str(bandwidth)
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        for other_network in self.capabilities['interfaces']:
-            if other_network == network:
-                continue
-            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-                'bandwidth'] else 'VHT'
-            other_bw = ''.join([
-                x for x in self.ap_settings[other_network]['bandwidth']
-                if x.isdigit()
-            ])
-            if other_mode != requested_mode:
-                updated_mode = '{}{}'.format(requested_mode, other_bw)
-                self.log.warning('All networks must be VHT or HE. '
-                                 'Updating {} to {}'.format(
-                                     other_network, updated_mode))
-                setting_to_update.setdefault(other_network, {})
-                setting_to_update[other_network]['bandwidth'] = updated_mode
-        return setting_to_update
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 6G)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 6G)
-            channel: string or int containing channel
-        """
-        setting_to_update = self._set_channel_and_bandwidth(network,
-                                                            channel=channel)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network channel and bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 6G)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, channel=channel, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def read_ap_firmware(self):
-        """Function to read the AP firmware version."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-
-            # Visit URL
-            browser.visit_persistent(self.firmware_page, BROWSER_WAIT_MED, 10)
-            firmware_regex = re.compile(
-                r'Firmware Version[\s\S]+V(?P<version>[0-9._]+)')
-            firmware_version = re.search(firmware_regex, browser.html)
-            if firmware_version:
-                self.ap_settings['firmware_version'] = firmware_version.group(
-                    'version')
-            else:
-                self.ap_settings['firmware_version'] = -1
-
-    def read_ap_settings(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    browser.visit_persistent(self.config_page_advanced,
-                                             BROWSER_WAIT_MED, 10)
-                    config_item = browser.find_by_name(value)
-                    self.ap_settings[key[0]][key[1]] = int(
-                        config_item.first.checked)
-                    browser.visit_persistent(self.config_page,
-                                             BROWSER_WAIT_MED, 10)
-                else:
-                    config_item = browser.find_by_name(value)
-                    if 'enable_ax' in key:
-                        self.ap_settings[key] = int(config_item.first.checked)
-                    elif 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            self.ap_settings['enable_ax']][
-                                config_item.first.value]
-                    elif 'power' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.power_mode_values[
-                                config_item.first.value]
-                    elif 'region' in key:
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.first.value]
-                    elif 'security_type' in key:
-                        for item in config_item:
-                            if item.checked:
-                                self.ap_settings[key[0]][key[1]] = item.value
-                    elif 'channel' in key:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.first.value)
-                    else:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][
-                            key[1]] = config_item.first.value
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            # Update region, and power/bandwidth for each network
-            try:
-                config_item = browser.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
-            except:
-                self.log.warning('Cannot change region.')
-            for key, value in self.config_page_fields.items():
-                if 'enable_ax' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings['enable_ax']:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-                if 'power' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.select_by_text(
-                        self.ap_settings[key[0]][key[1]])
-                elif 'bandwidth' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select_by_text(self.bw_mode_text[key[0]][
-                            self.ap_settings[key[0]][key[1]]])
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select bandwidth. Keeping AP default.')
-
-            # Update security settings (passwords updated only if applicable)
-            for key, value in self.config_page_fields.items():
-                if 'security_type' in key:
-                    browser.choose(value, self.ap_settings[key[0]][key[1]])
-                    if 'WPA' in self.ap_settings[key[0]][key[1]]:
-                        config_item = browser.find_by_name(
-                            self.config_page_fields[(key[0],
-                                                     'password')]).first
-                        config_item.fill(self.ap_settings[key[0]]['password'])
-
-            for key, value in self.config_page_fields.items():
-                if 'ssid' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.fill(self.ap_settings[key[0]][key[1]])
-                elif 'channel' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select(self.ap_settings[key[0]][key[1]])
-                        time.sleep(BROWSER_WAIT_SHORT)
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select channel. Keeping AP default.')
-                    try:
-                        alert = browser.get_alert()
-                        alert.accept()
-                    except:
-                        pass
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            try:
-                alert = browser.get_alert()
-                alert.accept()
-                time.sleep(BROWSER_WAIT_SHORT)
-            except:
-                time.sleep(BROWSER_WAIT_SHORT)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED, 10)
-
-            # Turn radios on or off
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings[key[0]][key[1]]:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
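
For reference, a hedged sketch of how a test could have driven this controller before its deletion. The ap_settings values are placeholders, and the WifiRetailAP base class may require additional keys; only the keys shown are ones the class above actually reads. Note that 6 GHz channels are requested with the '6g'-prefixed strings from capabilities['channels']['6G'], which _set_channel_and_bandwidth() strips down to the numeric channel.

from antlion.test_utils.wifi.wifi_retail_ap.netgear_raxe500 import NetgearRAXE500AP

ap_settings = {
    'protocol': 'http',
    'ip_address': '192.168.1.1',   # placeholder
    'port': 80,
    'admin_username': 'admin',     # placeholder
    'admin_password': 'password',  # placeholder
    'headless_browser': True,
}
# Logs into the GUI, then reads the firmware version and current settings.
ap = NetgearRAXE500AP(ap_settings)
# '6g37' is stripped to channel 37; HE160 also forces 2G/5G_1 onto HE modes.
ap.set_channel_and_bandwidth('6G', channel='6g37', bandwidth='HE160')
print(ap.read_ap_settings()['6G'])
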
diff --git a/src/antlion/test_utils/wifi/wifi_test_utils.py b/src/antlion/test_utils/wifi/wifi_test_utils.py
index 3a6105e..9c04b59 100755
--- a/src/antlion/test_utils/wifi/wifi_test_utils.py
+++ b/src/antlion/test_utils/wifi/wifi_test_utils.py
@@ -26,7 +26,6 @@
 
 from tenacity import retry, stop_after_attempt, wait_fixed
 
-from antlion import asserts
 from antlion import context
 from antlion import signals
 from antlion import utils
@@ -35,9 +34,9 @@
 from antlion.controllers.ap_lib import hostapd_ap_preset
 from antlion.controllers.ap_lib.hostapd_constants import BAND_2G
 from antlion.controllers.ap_lib.hostapd_constants import BAND_5G
-from antlion.test_utils.net import connectivity_const as cconsts
 from antlion.test_utils.wifi import wifi_constants
-from antlion.test_utils.wifi.aware import aware_test_utils as autils
+
+from mobly import asserts
 
 # Default timeout used for reboot, toggle WiFi and Airplane mode,
 # for the system to settle down after the operation.
@@ -61,12 +60,11 @@
 ROAMING_ATTN = {
     "AP1_on_AP2_off": [0, 0, 95, 95],
     "AP1_off_AP2_on": [95, 95, 0, 0],
-    "default": [0, 0, 0, 0]
+    "default": [0, 0, 0, 0],
 }
 
 
-class WifiEnums():
-
+class WifiEnums:
     SSID_KEY = "SSID"  # Used for Wifi & SoftAp
     SSID_PATTERN_KEY = "ssidPattern"
     NETID_KEY = "network_id"
@@ -87,7 +85,9 @@
     AP_BANDS_KEY = "apBands"
     AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies"
     AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting"
-    AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = "BridgedModeOpportunisticShutdownEnabled"
+    AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = (
+        "BridgedModeOpportunisticShutdownEnabled"
+    )
     AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled"
     AP_MAXCLIENTS_KEY = "MaxNumberOfClients"
     AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis"
@@ -119,13 +119,13 @@
     WIFI_WPS_INFO_LABEL = 3
     WIFI_WPS_INFO_INVALID = 4
 
-    class SoftApSecurityType():
+    class SoftApSecurityType:
         OPEN = "NONE"
         WPA2 = "WPA2_PSK"
         WPA3_SAE_TRANSITION = "WPA3_SAE_TRANSITION"
         WPA3_SAE = "WPA3_SAE"
 
-    class CountryCode():
+    class CountryCode:
         AUSTRALIA = "AU"
         CHINA = "CN"
         GERMANY = "DE"
@@ -180,12 +180,6 @@
 
     # End of Macros for EAP
 
-    # Macros for wifi p2p.
-    WIFI_P2P_SERVICE_TYPE_ALL = 0
-    WIFI_P2P_SERVICE_TYPE_BONJOUR = 1
-    WIFI_P2P_SERVICE_TYPE_UPNP = 2
-    WIFI_P2P_SERVICE_TYPE_VENDOR_SPECIFIC = 255
-
     class ScanResult:
         CHANNEL_WIDTH_20MHZ = 0
         CHANNEL_WIDTH_40MHZ = 1
@@ -264,7 +258,7 @@
     RTT_MARGIN_OF_ERROR = {
         RttBW.BW_80_SUPPORT: 2,
         RttBW.BW_40_SUPPORT: 5,
-        RttBW.BW_20_SUPPORT: 5
+        RttBW.BW_20_SUPPORT: 5,
     }
 
     # Macros as specified in the WifiScanner code.
@@ -286,15 +280,37 @@
 
     # US Wifi frequencies
     ALL_2G_FREQUENCIES = [
-        2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452, 2457, 2462
+        2412,
+        2417,
+        2422,
+        2427,
+        2432,
+        2437,
+        2442,
+        2447,
+        2452,
+        2457,
+        2462,
     ]
     DFS_5G_FREQUENCIES = [
-        5260, 5280, 5300, 5320, 5500, 5520, 5540, 5560, 5580, 5600, 5620, 5640,
-        5660, 5680, 5700, 5720
+        5260,
+        5280,
+        5300,
+        5320,
+        5500,
+        5520,
+        5540,
+        5560,
+        5580,
+        5600,
+        5620,
+        5640,
+        5660,
+        5680,
+        5700,
+        5720,
     ]
-    NONE_DFS_5G_FREQUENCIES = [
-        5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825
-    ]
+    NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825]
     ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
 
     band_to_frequencies = {
@@ -303,13 +319,13 @@
         WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES,
         WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES,
         WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES,
-        WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES
+        WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES,
     }
 
     # TODO: add all of the band mapping.
     softap_band_frequencies = {
         WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES,
-        WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES
+        WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES,
     }
 
     # All Wifi frequencies to channels lookup.
@@ -388,7 +404,7 @@
         11: 2462,
         12: 2467,
         13: 2472,
-        14: 2484
+        14: 2484,
     }
 
     channel_5G_to_freq = {
@@ -437,15 +453,15 @@
         157: 5785,
         159: 5795,
         161: 5805,
-        165: 5825
+        165: 5825,
     }
 
     channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
 
     channel_to_freq = {
-        '2G': channel_2G_to_freq,
-        '5G': channel_5G_to_freq,
-        '6G': channel_6G_to_freq
+        "2G": channel_2G_to_freq,
+        "5G": channel_5G_to_freq,
+        "6G": channel_6G_to_freq,
     }
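# Quick worked check of the 6 GHz comprehension above (illustrative, not part
# of this patch): 20 MHz channel numbers step by 4 starting at channel 1, and
# center frequencies step by 20 MHz starting at 5955 MHz.
channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
assert channel_6G_to_freq[1] == 5955    # x = 0, first 6 GHz channel
assert channel_6G_to_freq[37] == 6135   # x = 9
assert channel_6G_to_freq[233] == 7115  # x = 58, last generated channel
assert len(channel_6G_to_freq) == 59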
 
 
@@ -458,18 +474,14 @@
 
     def band_to_freq(self, band):
         _band_to_frequencies = {
-            WifiEnums.WIFI_BAND_24_GHZ:
-            self.ALL_2G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ:
-            self.NONE_DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ_DFS_ONLY:
-            self.DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ_WITH_DFS:
-            self.ALL_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_BOTH:
-            self.ALL_2G_FREQUENCIES + self.NONE_DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_BOTH_WITH_DFS:
-            self.ALL_5G_FREQUENCIES + self.ALL_2G_FREQUENCIES
+            WifiEnums.WIFI_BAND_24_GHZ: self.ALL_2G_FREQUENCIES,
+            WifiEnums.WIFI_BAND_5_GHZ: self.NONE_DFS_5G_FREQUENCIES,
+            WifiEnums.WIFI_BAND_5_GHZ_DFS_ONLY: self.DFS_5G_FREQUENCIES,
+            WifiEnums.WIFI_BAND_5_GHZ_WITH_DFS: self.ALL_5G_FREQUENCIES,
+            WifiEnums.WIFI_BAND_BOTH: self.ALL_2G_FREQUENCIES
+            + self.NONE_DFS_5G_FREQUENCIES,
+            WifiEnums.WIFI_BAND_BOTH_WITH_DFS: self.ALL_5G_FREQUENCIES
+            + self.ALL_2G_FREQUENCIES,
         }
         return _band_to_frequencies[band]
 
@@ -477,33 +489,80 @@
 class WifiChannelUS(WifiChannelBase):
     # US Wifi frequencies
     ALL_2G_FREQUENCIES = [
-        2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452, 2457, 2462
+        2412,
+        2417,
+        2422,
+        2427,
+        2432,
+        2437,
+        2442,
+        2447,
+        2452,
+        2457,
+        2462,
     ]
-    NONE_DFS_5G_FREQUENCIES = [
-        5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825
-    ]
+    NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825]
     MIX_CHANNEL_SCAN = [
-        2412, 2437, 2462, 5180, 5200, 5280, 5260, 5300, 5500, 5320, 5520, 5560,
-        5700, 5745, 5805
+        2412,
+        2437,
+        2462,
+        5180,
+        5200,
+        5280,
+        5260,
+        5300,
+        5500,
+        5320,
+        5520,
+        5560,
+        5700,
+        5745,
+        5805,
     ]
 
     def __init__(self, model=None, support_addition_channel=[]):
         if model in support_addition_channel:
             self.ALL_2G_FREQUENCIES = [
-                2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452, 2457,
-                2462, 2467, 2472
-                ]
-        self.DFS_5G_FREQUENCIES = [
-            5260, 5280, 5300, 5320, 5500, 5520, 5540, 5560, 5580, 5600, 5620,
-            5640, 5660, 5680, 5700, 5720
+                2412,
+                2417,
+                2422,
+                2427,
+                2432,
+                2437,
+                2442,
+                2447,
+                2452,
+                2457,
+                2462,
+                2467,
+                2472,
             ]
+        self.DFS_5G_FREQUENCIES = [
+            5260,
+            5280,
+            5300,
+            5320,
+            5500,
+            5520,
+            5540,
+            5560,
+            5580,
+            5600,
+            5620,
+            5640,
+            5660,
+            5680,
+            5700,
+            5720,
+        ]
         self.ALL_5G_FREQUENCIES = self.DFS_5G_FREQUENCIES + self.NONE_DFS_5G_FREQUENCIES
 
 
 class WifiReferenceNetworks:
-    """ Class to parse and return networks of different band and
-        auth type from reference_networks
+    """Class to parse and return networks of different band and
+    auth type from reference_networks
     """
+
     def __init__(self, obj):
         self.reference_networks = obj
         self.WIFI_2G = "2g"
@@ -589,8 +648,9 @@
     """
     match_results = match_networks(target, network_list)
     asserts.assert_true(
-        match_results, "Target network %s, does not exist in network list %s" %
-        (target, network_list))
+        match_results,
+        "Target network %s does not exist in network list %s" % (target, network_list),
+    )
 
 
 def match_networks(target_params, networks):
@@ -609,8 +669,9 @@
         The networks that match the target parameters.
     """
     results = []
-    asserts.assert_true(target_params,
-                        "Expected networks object 'target_params' is empty")
+    asserts.assert_true(
+        target_params, "Expected networks object 'target_params' is empty"
+    )
     for n in networks:
         add_network = 1
         for k, v in target_params.items():
@@ -638,10 +699,9 @@
         If assert_on_fail is False, function returns True if the device transitions
         to the specified state, False otherwise. If assert_on_fail is True, no return value.
     """
-    return _assert_on_fail_handler(_wait_for_wifi_state,
-                                   assert_on_fail,
-                                   ad,
-                                   state=state)
+    return _assert_on_fail_handler(
+        _wait_for_wifi_state, assert_on_fail, ad, state=state
+    )
 
 
 def _wait_for_wifi_state(ad, state):
@@ -659,11 +719,15 @@
         return
     ad.droid.wifiStartTrackingStateChange()
     fail_msg = "Device did not transition to Wi-Fi state to %s on %s." % (
-        state, ad.serial)
+        state,
+        ad.serial,
+    )
     try:
-        ad.ed.wait_for_event(wifi_constants.WIFI_STATE_CHANGED,
-                             lambda x: x["data"]["enabled"] == state,
-                             SHORT_TIMEOUT)
+        ad.ed.wait_for_event(
+            wifi_constants.WIFI_STATE_CHANGED,
+            lambda x: x["data"]["enabled"] == state,
+            SHORT_TIMEOUT,
+        )
     except Empty:
         asserts.assert_equal(state, ad.droid.wifiCheckState(), fail_msg)
     finally:
@@ -683,10 +747,9 @@
         If assert_on_fail is False, function returns True if the toggle was
         successful, False otherwise. If assert_on_fail is True, no return value.
     """
-    return _assert_on_fail_handler(_wifi_toggle_state,
-                                   assert_on_fail,
-                                   ad,
-                                   new_state=new_state)
+    return _assert_on_fail_handler(
+        _wifi_toggle_state, assert_on_fail, ad, new_state=new_state
+    )
 
 
 def _wifi_toggle_state(ad, new_state=None):
@@ -711,12 +774,13 @@
     # Setting wifi state.
     ad.droid.wifiToggleState(new_state)
     time.sleep(2)
-    fail_msg = "Failed to set Wi-Fi state to %s on %s." % (new_state,
-                                                           ad.serial)
+    fail_msg = "Failed to set Wi-Fi state to %s on %s." % (new_state, ad.serial)
     try:
-        ad.ed.wait_for_event(wifi_constants.WIFI_STATE_CHANGED,
-                             lambda x: x["data"]["enabled"] == new_state,
-                             SHORT_TIMEOUT)
+        ad.ed.wait_for_event(
+            wifi_constants.WIFI_STATE_CHANGED,
+            lambda x: x["data"]["enabled"] == new_state,
+            SHORT_TIMEOUT,
+        )
     except Empty:
         asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg)
     finally:
@@ -737,21 +801,22 @@
         return
     removed = []
     for n in networks:
-        if n['networkId'] not in removed:
-            ad.droid.wifiForgetNetwork(n['networkId'])
-            removed.append(n['networkId'])
+        if n["networkId"] not in removed:
+            ad.droid.wifiForgetNetwork(n["networkId"])
+            removed.append(n["networkId"])
         else:
             continue
         try:
-            event = ad.ed.pop_event(wifi_constants.WIFI_FORGET_NW_SUCCESS,
-                                    SHORT_TIMEOUT)
+            event = ad.ed.pop_event(
+                wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT
+            )
         except Empty:
             logging.warning("Could not confirm the removal of network %s.", n)
     # Check again to see if there's any network left.
     asserts.assert_true(
         not ad.droid.wifiGetConfiguredNetworks(),
-        "Failed to remove these configured Wi-Fi networks: %s" % networks)
-
+        "Failed to remove these configured Wi-Fi networks: %s" % networks,
+    )
 
 
 def toggle_airplane_mode_on_and_off(ad):
@@ -762,12 +827,16 @@
 
     """
     ad.log.debug("Toggling Airplane mode ON.")
-    asserts.assert_true(utils.force_airplane_mode(ad, True),
-                        "Can not turn on airplane mode on: %s" % ad.serial)
+    asserts.assert_true(
+        utils.force_airplane_mode(ad, True),
+        "Can not turn on airplane mode on: %s" % ad.serial,
+    )
     time.sleep(DEFAULT_TIMEOUT)
     ad.log.debug("Toggling Airplane mode OFF.")
-    asserts.assert_true(utils.force_airplane_mode(ad, False),
-                        "Can not turn on airplane mode on: %s" % ad.serial)
+    asserts.assert_true(
+        utils.force_airplane_mode(ad, False),
+        "Can not turn off airplane mode on: %s" % ad.serial,
+    )
     time.sleep(DEFAULT_TIMEOUT)
 
 
@@ -799,12 +868,13 @@
         return
     removed = []
     for n in networks:
-        if net_ssid in n[WifiEnums.SSID_KEY] and n['networkId'] not in removed:
-            ad.droid.wifiForgetNetwork(n['networkId'])
-            removed.append(n['networkId'])
+        if net_ssid in n[WifiEnums.SSID_KEY] and n["networkId"] not in removed:
+            ad.droid.wifiForgetNetwork(n["networkId"])
+            removed.append(n["networkId"])
             try:
-                event = ad.ed.pop_event(wifi_constants.WIFI_FORGET_NW_SUCCESS,
-                                        SHORT_TIMEOUT)
+                event = ad.ed.pop_event(
+                    wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT
+                )
             except Empty:
                 asserts.fail("Failed to remove network %s." % n)
             break
@@ -823,7 +893,7 @@
     7. Turn off cellular data.
     8. Turn off ambient display.
     """
-    utils.require_sl4a((ad, ))
+    utils.require_sl4a((ad,))
     ad.droid.wifiScannerToggleAlwaysAvailable(False)
     msg = "Failed to turn off location service's scan."
     asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
@@ -835,9 +905,9 @@
     # We don't verify the following settings since they are not critical.
     # Set wpa_supplicant log level to EXCESSIVE.
     output = ad.adb.shell(
-        "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME="
-        "wlan0 log_level EXCESSIVE",
-        ignore_status=True)
+        "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME=" "wlan0 log_level EXCESSIVE",
+        ignore_status=True,
+    )
     ad.log.info("wpa_supplicant log change status: %s", output)
     utils.sync_device_time(ad)
     ad.droid.telephonyToggleDataConnection(False)
@@ -889,11 +959,9 @@
     ad.ed.clear_all_events()
     ad.droid.wifiStartScan()
     try:
-        events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)",
-                                  60)
+        events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)", 60)
     except Empty:
-        asserts.fail(
-            "Wi-Fi scan results/failure did not become available within 60s.")
+        asserts.fail("Wi-Fi scan results/failure did not become available within 60s.")
     # If there are multiple matches, we check for at least one success.
     for event in events:
         if event["name"] == "WifiManagerScanResultsAvailable":
@@ -903,9 +971,7 @@
     return False
 
 
-def start_wifi_connection_scan_and_check_for_network(ad,
-                                                     network_ssid,
-                                                     max_tries=3):
+def start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries=3):
     """
     Start connectivity scans & checks if the |network_ssid| is seen in
     scan results. The method performs a max of |max_tries| connectivity scans
@@ -923,19 +989,19 @@
     for num_tries in range(max_tries):
         if start_wifi_connection_scan_and_return_status(ad):
             scan_results = ad.droid.wifiGetScanResults()
-            match_results = match_networks({WifiEnums.SSID_KEY: network_ssid},
-                                           scan_results)
+            match_results = match_networks(
+                {WifiEnums.SSID_KEY: network_ssid}, scan_results
+            )
             if len(match_results) > 0:
-                ad.log.debug("Found network in %s seconds." %
-                             (time.time() - start_time))
+                ad.log.debug(
+                    "Found network in %s seconds." % (time.time() - start_time)
+                )
                 return True
-    ad.log.debug("Did not find network in %s seconds." %
-                 (time.time() - start_time))
+    ad.log.debug("Did not find network in %s seconds." % (time.time() - start_time))
     return False
 
 
-def start_wifi_connection_scan_and_ensure_network_found(
-        ad, network_ssid, max_tries=3):
+def start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid, max_tries=3):
     """
     Start connectivity scans & ensure the |network_ssid| is seen in
     scan results. The method performs a max of |max_tries| connectivity scans
@@ -948,15 +1014,19 @@
         max_tries: Number of scans to try.
     """
     ad.log.info("Starting scans to ensure %s is present", network_ssid)
-    assert_msg = "Failed to find " + network_ssid + " in scan results" \
+    assert_msg = (
+        "Failed to find " + network_ssid + " in scan results"
         " after " + str(max_tries) + " tries"
+    )
     asserts.assert_true(
-        start_wifi_connection_scan_and_check_for_network(
-            ad, network_ssid, max_tries), assert_msg)
+        start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries),
+        assert_msg,
+    )
 
 
 def start_wifi_connection_scan_and_ensure_network_not_found(
-        ad, network_ssid, max_tries=3):
+    ad, network_ssid, max_tries=3
+):
     """
     Start connectivity scans & ensure the |network_ssid| is not seen in
     scan results. The method performs a max of |max_tries| connectivity scans
@@ -969,11 +1039,14 @@
         max_tries: Number of scans to try.
     """
     ad.log.info("Starting scans to ensure %s is not present", network_ssid)
-    assert_msg = "Found " + network_ssid + " in scan results" \
+    assert_msg = (
+        "Found " + network_ssid + " in scan results"
         " after " + str(max_tries) + " tries"
+    )
     asserts.assert_false(
-        start_wifi_connection_scan_and_check_for_network(
-            ad, network_ssid, max_tries), assert_msg)
+        start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries),
+        assert_msg,
+    )
 
 
 def start_wifi_background_scan(ad, scan_setting):
@@ -987,30 +1060,31 @@
         If scan was started successfully, event data of success event is returned.
     """
     idx = ad.droid.wifiScannerStartBackgroundScan(scan_setting)
-    event = ad.ed.pop_event("WifiScannerScan{}onSuccess".format(idx),
-                            SHORT_TIMEOUT)
-    return event['data']
+    event = ad.ed.pop_event("WifiScannerScan{}onSuccess".format(idx), SHORT_TIMEOUT)
+    return event["data"]
 
 
-def save_wifi_soft_ap_config(ad,
-                             wifi_config,
-                             band=None,
-                             hidden=None,
-                             security=None,
-                             password=None,
-                             channel=None,
-                             max_clients=None,
-                             shutdown_timeout_enable=None,
-                             shutdown_timeout_millis=None,
-                             client_control_enable=None,
-                             allowedList=None,
-                             blockedList=None,
-                             bands=None,
-                             channel_frequencys=None,
-                             mac_randomization_setting=None,
-                             bridged_opportunistic_shutdown_enabled=None,
-                             ieee80211ax_enabled=None):
-    """ Save a soft ap configuration and verified
+def save_wifi_soft_ap_config(
+    ad,
+    wifi_config,
+    band=None,
+    hidden=None,
+    security=None,
+    password=None,
+    channel=None,
+    max_clients=None,
+    shutdown_timeout_enable=None,
+    shutdown_timeout_millis=None,
+    client_control_enable=None,
+    allowedList=None,
+    blockedList=None,
+    bands=None,
+    channel_frequencys=None,
+    mac_randomization_setting=None,
+    bridged_opportunistic_shutdown_enabled=None,
+    ieee80211ax_enabled=None,
+):
+    """Save a soft ap configuration and verify it was applied.
     Args:
         ad: android_device to set soft ap configuration.
         wifi_config: a soft ap configuration object, at least include SSID.
@@ -1040,8 +1114,7 @@
     if max_clients is not None:
         wifi_config[WifiEnums.AP_MAXCLIENTS_KEY] = max_clients
     if shutdown_timeout_enable is not None:
-        wifi_config[
-            WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] = shutdown_timeout_enable
+        wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] = shutdown_timeout_enable
     if shutdown_timeout_millis is not None:
         wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] = shutdown_timeout_millis
     if client_control_enable is not None:
@@ -1051,13 +1124,15 @@
     if blockedList is not None:
         wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY] = blockedList
     if mac_randomization_setting is not None:
-        wifi_config[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY
-                ] = mac_randomization_setting
+        wifi_config[
+            WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY
+        ] = mac_randomization_setting
     if bridged_opportunistic_shutdown_enabled is not None:
-        wifi_config[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY
-                ] = bridged_opportunistic_shutdown_enabled
+        wifi_config[
+            WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY
+        ] = bridged_opportunistic_shutdown_enabled
     if ieee80211ax_enabled is not None:
-       wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY]= ieee80211ax_enabled
+        wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY] = ieee80211ax_enabled
     if channel_frequencys is not None:
         wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] = channel_frequencys
     elif bands is not None:
@@ -1067,101 +1142,122 @@
             wifi_config[WifiEnums.AP_BAND_KEY] = band
             wifi_config[WifiEnums.AP_CHANNEL_KEY] = channel
         else:
-             wifi_config[WifiEnums.AP_BAND_KEY] = band
+            wifi_config[WifiEnums.AP_BAND_KEY] = band
 
-    if WifiEnums.AP_CHANNEL_KEY in wifi_config and wifi_config[
-            WifiEnums.AP_CHANNEL_KEY] == 0:
+    if (
+        WifiEnums.AP_CHANNEL_KEY in wifi_config
+        and wifi_config[WifiEnums.AP_CHANNEL_KEY] == 0
+    ):
         del wifi_config[WifiEnums.AP_CHANNEL_KEY]
 
-    if WifiEnums.SECURITY in wifi_config and wifi_config[
-            WifiEnums.SECURITY] == WifiEnums.SoftApSecurityType.OPEN:
+    if (
+        WifiEnums.SECURITY in wifi_config
+        and wifi_config[WifiEnums.SECURITY] == WifiEnums.SoftApSecurityType.OPEN
+    ):
         del wifi_config[WifiEnums.SECURITY]
         del wifi_config[WifiEnums.PWD_KEY]
 
-    asserts.assert_true(ad.droid.wifiSetWifiApConfiguration(wifi_config),
-                        "Failed to set WifiAp Configuration")
+    asserts.assert_true(
+        ad.droid.wifiSetWifiApConfiguration(wifi_config),
+        "Failed to set WifiAp Configuration",
+    )
 
     wifi_ap = ad.droid.wifiGetApConfiguration()
     asserts.assert_true(
         wifi_ap[WifiEnums.SSID_KEY] == wifi_config[WifiEnums.SSID_KEY],
-        "Hotspot SSID doesn't match")
+        "Hotspot SSID doesn't match",
+    )
     if WifiEnums.SECURITY in wifi_config:
         asserts.assert_true(
             wifi_ap[WifiEnums.SECURITY] == wifi_config[WifiEnums.SECURITY],
-            "Hotspot Security doesn't match")
+            "Hotspot Security doesn't match",
+        )
     if WifiEnums.PWD_KEY in wifi_config:
         asserts.assert_true(
             wifi_ap[WifiEnums.PWD_KEY] == wifi_config[WifiEnums.PWD_KEY],
-            "Hotspot Password doesn't match")
+            "Hotspot Password doesn't match",
+        )
 
     if WifiEnums.HIDDEN_KEY in wifi_config:
         asserts.assert_true(
             wifi_ap[WifiEnums.HIDDEN_KEY] == wifi_config[WifiEnums.HIDDEN_KEY],
-            "Hotspot hidden setting doesn't match")
+            "Hotspot hidden setting doesn't match",
+        )
 
     if WifiEnums.AP_CHANNEL_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CHANNEL_KEY] == wifi_config[
-                WifiEnums.AP_CHANNEL_KEY], "Hotspot Channel doesn't match")
+            wifi_ap[WifiEnums.AP_CHANNEL_KEY] == wifi_config[WifiEnums.AP_CHANNEL_KEY],
+            "Hotspot Channel doesn't match",
+        )
     if WifiEnums.AP_MAXCLIENTS_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_MAXCLIENTS_KEY] == wifi_config[
-                WifiEnums.AP_MAXCLIENTS_KEY],
-            "Hotspot Max Clients doesn't match")
+            wifi_ap[WifiEnums.AP_MAXCLIENTS_KEY]
+            == wifi_config[WifiEnums.AP_MAXCLIENTS_KEY],
+            "Hotspot Max Clients doesn't match",
+        )
     if WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] == wifi_config[
-                WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY],
-            "Hotspot ShutDown feature flag doesn't match")
+            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY]
+            == wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY],
+            "Hotspot ShutDown feature flag doesn't match",
+        )
     if WifiEnums.AP_SHUTDOWNTIMEOUT_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] == wifi_config[
-                WifiEnums.AP_SHUTDOWNTIMEOUT_KEY],
-            "Hotspot ShutDown timeout setting doesn't match")
+            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY]
+            == wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY],
+            "Hotspot ShutDown timeout setting doesn't match",
+        )
     if WifiEnums.AP_CLIENTCONTROL_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CLIENTCONTROL_KEY] == wifi_config[
-                WifiEnums.AP_CLIENTCONTROL_KEY],
-            "Hotspot Client control flag doesn't match")
+            wifi_ap[WifiEnums.AP_CLIENTCONTROL_KEY]
+            == wifi_config[WifiEnums.AP_CLIENTCONTROL_KEY],
+            "Hotspot Client control flag doesn't match",
+        )
     if WifiEnums.AP_ALLOWEDLIST_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_ALLOWEDLIST_KEY] == wifi_config[
-                WifiEnums.AP_ALLOWEDLIST_KEY],
-            "Hotspot Allowed List doesn't match")
+            wifi_ap[WifiEnums.AP_ALLOWEDLIST_KEY]
+            == wifi_config[WifiEnums.AP_ALLOWEDLIST_KEY],
+            "Hotspot Allowed List doesn't match",
+        )
     if WifiEnums.AP_BLOCKEDLIST_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_BLOCKEDLIST_KEY] == wifi_config[
-                WifiEnums.AP_BLOCKEDLIST_KEY],
-            "Hotspot Blocked List doesn't match")
+            wifi_ap[WifiEnums.AP_BLOCKEDLIST_KEY]
+            == wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY],
+            "Hotspot Blocked List doesn't match",
+        )
 
     if WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY] == wifi_config[
-                  WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY],
-            "Hotspot Mac randomization setting doesn't match")
+            wifi_ap[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY]
+            == wifi_config[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY],
+            "Hotspot Mac randomization setting doesn't match",
+        )
 
     if WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY] == wifi_config[
-                  WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY],
-            "Hotspot bridged shutdown enable setting doesn't match")
+            wifi_ap[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY]
+            == wifi_config[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY],
+            "Hotspot bridged shutdown enable setting doesn't match",
+        )
 
     if WifiEnums.AP_IEEE80211AX_ENABLED_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_IEEE80211AX_ENABLED_KEY] == wifi_config[
-                  WifiEnums.AP_IEEE80211AX_ENABLED_KEY],
-            "Hotspot 80211 AX enable setting doesn't match")
+            wifi_ap[WifiEnums.AP_IEEE80211AX_ENABLED_KEY]
+            == wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY],
+            "Hotspot 80211 AX enable setting doesn't match",
+        )
 
     if WifiEnums.AP_CHANNEL_FREQUENCYS_KEY in wifi_config:
         asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] == wifi_config[
-                  WifiEnums.AP_CHANNEL_FREQUENCYS_KEY],
-            "Hotspot channels setting doesn't match")
+            wifi_ap[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY]
+            == wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY],
+            "Hotspot channels setting doesn't match",
+        )
 
-def toggle_wifi_and_wait_for_reconnection(ad,
-                                          network,
-                                          num_of_tries=1,
-                                          assert_on_fail=True):
+
+def toggle_wifi_and_wait_for_reconnection(
+    ad, network, num_of_tries=1, assert_on_fail=True
+):
     """Toggle wifi state and then wait for Android device to reconnect to
     the provided wifi network.
 
@@ -1188,11 +1284,13 @@
         If assert_on_fail is False, function returns True if the toggle was
         successful, False otherwise. If assert_on_fail is True, no return value.
     """
-    return _assert_on_fail_handler(_toggle_wifi_and_wait_for_reconnection,
-                                   assert_on_fail,
-                                   ad,
-                                   network,
-                                   num_of_tries=num_of_tries)
+    return _assert_on_fail_handler(
+        _toggle_wifi_and_wait_for_reconnection,
+        assert_on_fail,
+        ad,
+        network,
+        num_of_tries=num_of_tries,
+    )
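
As a usage sketch only (not part of this patch; `ad` is an AndroidDevice and
`network` a config dict supplied by the test), the assert_on_fail flag switches
the helper between raising a test failure and returning a boolean:

    ok = toggle_wifi_and_wait_for_reconnection(
        ad, network, num_of_tries=3, assert_on_fail=False
    )
    if not ok:
        ad.log.warning("Reconnection failed; retrying with a fresh connect")
        wifi_connect(ad, network, assert_on_fail=True)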
 
 
 def _toggle_wifi_and_wait_for_reconnection(ad, network, num_of_tries=3):
@@ -1231,32 +1329,30 @@
         connect_result = None
         for i in range(num_of_tries):
             try:
-                connect_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED,
-                                                 30)
+                connect_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
                 break
             except Empty:
                 pass
         asserts.assert_true(
-            connect_result, "Failed to connect to Wi-Fi network %s on %s" %
-            (network, ad.serial))
-        logging.debug("Connection result on %s: %s.", ad.serial,
-                      connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
+            connect_result,
+            "Failed to connect to Wi-Fi network %s on %s" % (network, ad.serial),
+        )
+        logging.debug("Connection result on %s: %s.", ad.serial, connect_result)
+        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
         asserts.assert_equal(
-            actual_ssid, expected_ssid, "Connected to the wrong network on %s."
-            "Expected %s, but got %s." %
-            (ad.serial, expected_ssid, actual_ssid))
-        logging.info("Connected to Wi-Fi network %s on %s", actual_ssid,
-                     ad.serial)
+            actual_ssid,
+            expected_ssid,
+            "Connected to the wrong network on %s."
+            "Expected %s, but got %s." % (ad.serial, expected_ssid, actual_ssid),
+        )
+        logging.info("Connected to Wi-Fi network %s on %s", actual_ssid, ad.serial)
     finally:
         ad.droid.wifiStopTrackingStateChange()
 
 
-def wait_for_connect(ad,
-                     expected_ssid=None,
-                     expected_id=None,
-                     tries=2,
-                     assert_on_fail=True):
+def wait_for_connect(
+    ad, expected_ssid=None, expected_id=None, tries=2, assert_on_fail=True
+):
     """Wait for a connect event.
 
     This will directly fail a test if anything goes wrong.
@@ -1273,8 +1369,9 @@
         Returns a value only if assert_on_fail is false.
         Returns True if the connection was successful, False otherwise.
     """
-    return _assert_on_fail_handler(_wait_for_connect, assert_on_fail, ad,
-                                   expected_ssid, expected_id, tries)
+    return _assert_on_fail_handler(
+        _wait_for_connect, assert_on_fail, ad, expected_ssid, expected_id, tries
+    )
 
 
 def _wait_for_connect(ad, expected_ssid=None, expected_id=None, tries=2):
@@ -1288,31 +1385,29 @@
     """
     ad.droid.wifiStartTrackingStateChange()
     try:
-        connect_result = _wait_for_connect_event(ad,
-                                                 ssid=expected_ssid,
-                                                 id=expected_id,
-                                                 tries=tries)
+        connect_result = _wait_for_connect_event(
+            ad, ssid=expected_ssid, id=expected_id, tries=tries
+        )
         asserts.assert_true(
-            connect_result,
-            "Failed to connect to Wi-Fi network %s" % expected_ssid)
+            connect_result, "Failed to connect to Wi-Fi network %s" % expected_ssid
+        )
         ad.log.debug("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
+        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
         if expected_ssid:
-            asserts.assert_equal(actual_ssid, expected_ssid,
-                                 "Connected to the wrong network")
-        actual_id = connect_result['data'][WifiEnums.NETID_KEY]
+            asserts.assert_equal(
+                actual_ssid, expected_ssid, "Connected to the wrong network"
+            )
+        actual_id = connect_result["data"][WifiEnums.NETID_KEY]
         if expected_id:
-            asserts.assert_equal(actual_id, expected_id,
-                                 "Connected to the wrong network")
+            asserts.assert_equal(
+                actual_id, expected_id, "Connected to the wrong network"
+            )
         ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
     except Empty:
-        asserts.fail("Failed to start connection process to %s" %
-                     expected_ssid)
+        asserts.fail("Failed to start connection process to %s" % expected_ssid)
     except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s", expected_ssid,
-                     error)
-        raise signals.TestFailure("Failed to connect to %s network" %
-                                  expected_ssid)
+        ad.log.error("Failed to connect to %s with error %s", expected_ssid, error)
+        raise signals.TestFailure("Failed to connect to %s network" % expected_ssid)
     finally:
         ad.droid.wifiStopTrackingStateChange()
 
@@ -1352,8 +1447,7 @@
     if id is None and ssid is None:
         for i in range(tries):
             try:
-                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED,
-                                              30)
+                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
                 break
             except Empty:
                 pass
@@ -1361,11 +1455,10 @@
         # If ssid or network id is specified, wait for specific connect event.
         for i in range(tries):
             try:
-                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED,
-                                              30)
-                if id and conn_result['data'][WifiEnums.NETID_KEY] == id:
+                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
+                if id and conn_result["data"][WifiEnums.NETID_KEY] == id:
                     break
-                elif ssid and conn_result['data'][WifiEnums.SSID_KEY] == ssid:
+                elif ssid and conn_result["data"][WifiEnums.SSID_KEY] == ssid:
                     break
             except Empty:
                 pass
@@ -1408,10 +1501,15 @@
         ad.droid.wifiStopTrackingStateChange()
 
 
-def connect_to_wifi_network(ad, network, assert_on_fail=True,
-                            check_connectivity=True, hidden=False,
-                            num_of_scan_tries=DEFAULT_SCAN_TRIES,
-                            num_of_connect_tries=DEFAULT_CONNECT_TRIES):
+def connect_to_wifi_network(
+    ad,
+    network,
+    assert_on_fail=True,
+    check_connectivity=True,
+    hidden=False,
+    num_of_scan_tries=DEFAULT_SCAN_TRIES,
+    num_of_connect_tries=DEFAULT_CONNECT_TRIES,
+):
     """Connection logic for open and psk wifi networks.
 
     Args:
@@ -1427,15 +1525,19 @@
     """
     if hidden:
         start_wifi_connection_scan_and_ensure_network_not_found(
-            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries)
+            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
+        )
     else:
         start_wifi_connection_scan_and_ensure_network_found(
-            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries)
-    wifi_connect(ad,
-                 network,
-                 num_of_tries=num_of_connect_tries,
-                 assert_on_fail=assert_on_fail,
-                 check_connectivity=check_connectivity)
+            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
+        )
+    wifi_connect(
+        ad,
+        network,
+        num_of_tries=num_of_connect_tries,
+        assert_on_fail=assert_on_fail,
+        check_connectivity=check_connectivity,
+    )
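
A brief, hedged usage example (illustrative only; `ad` and the hidden network
dict come from the test's own configuration) showing the hidden-network path,
which first verifies the hidden SSID is absent from scan results:

    connect_to_wifi_network(
        ad,
        hidden_network,
        hidden=True,
        num_of_scan_tries=5,
        check_connectivity=False,
    )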
 
 
 def connect_to_wifi_network_with_id(ad, network_id, network_ssid):
@@ -1453,18 +1555,17 @@
     wifi_connect_by_id(ad, network_id)
     connect_data = ad.droid.wifiGetConnectionInfo()
     connect_ssid = connect_data[WifiEnums.SSID_KEY]
-    ad.log.debug("Expected SSID = %s Connected SSID = %s" %
-                 (network_ssid, connect_ssid))
+    ad.log.debug(
+        "Expected SSID = %s Connected SSID = %s" % (network_ssid, connect_ssid)
+    )
     if connect_ssid != network_ssid:
         return False
     return True
 
 
-def wifi_connect(ad,
-                 network,
-                 num_of_tries=1,
-                 assert_on_fail=True,
-                 check_connectivity=True):
+def wifi_connect(
+    ad, network, num_of_tries=1, assert_on_fail=True, check_connectivity=True
+):
     """Connect an Android device to a wifi network.
 
     Initiate connection to a wifi network, wait for the "connected" event, then
@@ -1485,12 +1586,14 @@
         Returns a value only if assert_on_fail is false.
         Returns True if the connection was successful, False otherwise.
     """
-    return _assert_on_fail_handler(_wifi_connect,
-                                   assert_on_fail,
-                                   ad,
-                                   network,
-                                   num_of_tries=num_of_tries,
-                                   check_connectivity=check_connectivity)
+    return _assert_on_fail_handler(
+        _wifi_connect,
+        assert_on_fail,
+        ad,
+        network,
+        num_of_tries=num_of_tries,
+        check_connectivity=check_connectivity,
+    )
 
 
 def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True):
@@ -1510,37 +1613,42 @@
     """
     asserts.assert_true(
         WifiEnums.SSID_KEY in network,
-        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY)
+        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY,
+    )
     ad.droid.wifiStartTrackingStateChange()
     expected_ssid = network[WifiEnums.SSID_KEY]
     ad.droid.wifiConnectByConfig(network)
     ad.log.info("Starting connection process to %s", expected_ssid)
     try:
         event = ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30)
-        connect_result = _wait_for_connect_event(ad,
-                                                 ssid=expected_ssid,
-                                                 tries=num_of_tries)
+        connect_result = _wait_for_connect_event(
+            ad, ssid=expected_ssid, tries=num_of_tries
+        )
         asserts.assert_true(
-            connect_result, "Failed to connect to Wi-Fi network %s on %s" %
-            (network, ad.serial))
+            connect_result,
+            "Failed to connect to Wi-Fi network %s on %s" % (network, ad.serial),
+        )
         ad.log.debug("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
+        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
         asserts.assert_equal(
-            actual_ssid, expected_ssid,
-            "Connected to the wrong network on %s." % ad.serial)
+            actual_ssid,
+            expected_ssid,
+            "Connected to the wrong network on %s." % ad.serial,
+        )
         ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
 
         if check_connectivity:
             internet = validate_connection(ad, DEFAULT_PING_ADDR)
             if not internet:
                 raise signals.TestFailure(
-                    "Failed to connect to internet on %s" % expected_ssid)
+                    "Failed to connect to internet on %s" % expected_ssid
+                )
     except Empty:
-        asserts.fail("Failed to start connection process to %s on %s" %
-                     (network, ad.serial))
+        asserts.fail(
+            "Failed to start connection process to %s on %s" % (network, ad.serial)
+        )
     except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s", expected_ssid,
-                     error)
+        ad.log.error("Failed to connect to %s with error %s", expected_ssid, error)
         raise signals.TestFailure("Failed to connect to %s network" % network)
 
     finally:
@@ -1567,8 +1675,9 @@
         Returns a value only if assert_on_fail is false.
         Returns True if the connection was successful, False otherwise.
     """
-    _assert_on_fail_handler(_wifi_connect_by_id, assert_on_fail, ad,
-                            network_id, num_of_tries)
+    _assert_on_fail_handler(
+        _wifi_connect_by_id, assert_on_fail, ad, network_id, num_of_tries
+    )
 
 
 def _wifi_connect_by_id(ad, network_id, num_of_tries=1):
@@ -1590,42 +1699,47 @@
     ad.log.info("Starting connection to network with id %d", network_id)
     try:
         event = ad.ed.pop_event(wifi_constants.CONNECT_BY_NETID_SUCCESS, 60)
-        connect_result = _wait_for_connect_event(ad,
-                                                 id=network_id,
-                                                 tries=num_of_tries)
+        connect_result = _wait_for_connect_event(ad, id=network_id, tries=num_of_tries)
         asserts.assert_true(
-            connect_result,
-            "Failed to connect to Wi-Fi network using network id")
+            connect_result, "Failed to connect to Wi-Fi network using network id"
+        )
         ad.log.debug("Wi-Fi connection result: %s", connect_result)
-        actual_id = connect_result['data'][WifiEnums.NETID_KEY]
+        actual_id = connect_result["data"][WifiEnums.NETID_KEY]
         asserts.assert_equal(
-            actual_id, network_id, "Connected to the wrong network on %s."
-            "Expected network id = %d, but got %d." %
-            (ad.serial, network_id, actual_id))
-        expected_ssid = connect_result['data'][WifiEnums.SSID_KEY]
-        ad.log.info("Connected to Wi-Fi network %s with %d network id.",
-                    expected_ssid, network_id)
+            actual_id,
+            network_id,
+            "Connected to the wrong network on %s."
+            "Expected network id = %d, but got %d."
+            % (ad.serial, network_id, actual_id),
+        )
+        expected_ssid = connect_result["data"][WifiEnums.SSID_KEY]
+        ad.log.info(
+            "Connected to Wi-Fi network %s with %d network id.",
+            expected_ssid,
+            network_id,
+        )
 
         internet = validate_connection(ad, DEFAULT_PING_ADDR)
         if not internet:
-            raise signals.TestFailure("Failed to connect to internet on %s" %
-                                      expected_ssid)
+            raise signals.TestFailure(
+                "Failed to connect to internet on %s" % expected_ssid
+            )
     except Empty:
-        asserts.fail("Failed to connect to network with id %d on %s" %
-                     (network_id, ad.serial))
+        asserts.fail(
+            "Failed to connect to network with id %d on %s" % (network_id, ad.serial)
+        )
     except Exception as error:
-        ad.log.error("Failed to connect to network with id %d with error %s",
-                     network_id, error)
-        raise signals.TestFailure("Failed to connect to network with network"
-                                  " id %d" % network_id)
+        ad.log.error(
+            "Failed to connect to network with id %d with error %s", network_id, error
+        )
+        raise signals.TestFailure(
+            "Failed to connect to network with network" " id %d" % network_id
+        )
     finally:
         ad.droid.wifiStopTrackingStateChange()
 
 
-def wifi_connect_using_network_request(ad,
-                                       network,
-                                       network_specifier,
-                                       num_of_tries=3):
+def wifi_connect_using_network_request(ad, network, network_specifier, num_of_tries=3):
     """Connect an Android device to a wifi network using network request.
 
     Trigger a network request with the provided network specifier,
@@ -1650,16 +1764,13 @@
     # Need a delay here because UI interaction should only start once wifi
     # starts processing the request.
     time.sleep(wifi_constants.NETWORK_REQUEST_CB_REGISTER_DELAY_SEC)
-    _wait_for_wifi_connect_after_network_request(ad, network, key,
-                                                 num_of_tries)
+    _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries)
     return key
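
Illustrative usage (not part of this patch; `network` and `network_specifier`
are assumed to be built by the calling test). The returned key identifies the
registered network callback and can later be handed back to
wait_for_wifi_connect_after_network_request() to re-verify the flow:

    key = wifi_connect_using_network_request(ad, network, network_specifier)
    ad.log.info("Network request registered with callback key %s", key)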
 
 
-def wait_for_wifi_connect_after_network_request(ad,
-                                                network,
-                                                key,
-                                                num_of_tries=3,
-                                                assert_on_fail=True):
+def wait_for_wifi_connect_after_network_request(
+    ad, network, key, num_of_tries=3, assert_on_fail=True
+):
     """
     Simulate and verify the connection flow after initiating the network
     request.
@@ -1683,14 +1794,17 @@
         Returns a value only if assert_on_fail is false.
         Returns True if the connection was successful, False otherwise.
     """
-    _assert_on_fail_handler(_wait_for_wifi_connect_after_network_request,
-                            assert_on_fail, ad, network, key, num_of_tries)
+    _assert_on_fail_handler(
+        _wait_for_wifi_connect_after_network_request,
+        assert_on_fail,
+        ad,
+        network,
+        key,
+        num_of_tries,
+    )
 
 
-def _wait_for_wifi_connect_after_network_request(ad,
-                                                 network,
-                                                 key,
-                                                 num_of_tries=3):
+def _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries=3):
     """
     Simulate and verify the connection flow after initiating the network
     request.
@@ -1710,7 +1824,8 @@
     """
     asserts.assert_true(
         WifiEnums.SSID_KEY in network,
-        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY)
+        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY,
+    )
     ad.droid.wifiStartTrackingStateChange()
     expected_ssid = network[WifiEnums.SSID_KEY]
     ad.droid.wifiRegisterNetworkRequestMatchCallback()
@@ -1720,64 +1835,47 @@
         matched_network = None
         for _ in [0, num_of_tries]:
             on_match_event = ad.ed.pop_event(
-                wifi_constants.WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH, 60)
-            asserts.assert_true(on_match_event,
-                                "Network request on match not received.")
+                wifi_constants.WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH, 60
+            )
+            asserts.assert_true(
+                on_match_event, "Network request on match not received."
+            )
             matched_scan_results = on_match_event["data"]
-            ad.log.debug("Network request on match results %s",
-                         matched_scan_results)
+            ad.log.debug("Network request on match results %s", matched_scan_results)
             matched_network = match_networks(
-                {WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]},
-                matched_scan_results)
+                {WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]}, matched_scan_results
+            )
             ad.log.debug("Network request on match %s", matched_network)
             if matched_network:
                 break
 
-        asserts.assert_true(matched_network,
-                            "Target network %s not found" % network)
+        asserts.assert_true(matched_network, "Target network %s not found" % network)
 
         ad.droid.wifiSendUserSelectionForNetworkRequestMatch(network)
-        ad.log.info("Sent user selection for network request %s",
-                    expected_ssid)
+        ad.log.info("Sent user selection for network request %s", expected_ssid)
 
         # Wait for the platform to connect to the network.
-        autils.wait_for_event_with_keys(
-            ad, cconsts.EVENT_NETWORK_CALLBACK, 60,
-            (cconsts.NETWORK_CB_KEY_ID, key),
-            (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_AVAILABLE))
-        on_capabilities_changed = autils.wait_for_event_with_keys(
-            ad, cconsts.EVENT_NETWORK_CALLBACK, 10,
-            (cconsts.NETWORK_CB_KEY_ID, key),
-            (cconsts.NETWORK_CB_KEY_EVENT,
-             cconsts.NETWORK_CB_CAPABILITIES_CHANGED))
         connected_network = None
         # WifiInfo is attached to TransportInfo only in S.
-        if ad.droid.isSdkAtLeastS():
-            connected_network = (
-                on_capabilities_changed["data"][
-                    cconsts.NETWORK_CB_KEY_TRANSPORT_INFO]
-            )
-        else:
+        if not ad.droid.isSdkAtLeastS():
             connected_network = ad.droid.wifiGetConnectionInfo()
         ad.log.info("Connected to network %s", connected_network)
         asserts.assert_equal(
-            connected_network[WifiEnums.SSID_KEY], expected_ssid,
+            connected_network[WifiEnums.SSID_KEY],
+            expected_ssid,
             "Connected to the wrong network."
-            "Expected %s, but got %s." % (network, connected_network))
+            "Expected %s, but got %s." % (network, connected_network),
+        )
     except Empty:
         asserts.fail("Failed to connect to %s" % expected_ssid)
     except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s" %
-                     (expected_ssid, error))
+        ad.log.error("Failed to connect to %s with error %s" % (expected_ssid, error))
         raise signals.TestFailure("Failed to connect to %s network" % network)
     finally:
         ad.droid.wifiStopTrackingStateChange()
 
 
-def wifi_passpoint_connect(ad,
-                           passpoint_network,
-                           num_of_tries=1,
-                           assert_on_fail=True):
+def wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1, assert_on_fail=True):
     """Connect an Android device to a wifi network.
 
     Initiate connection to a wifi network, wait for the "connected" event, then
@@ -1797,11 +1895,13 @@
         If assert_on_fail is False, function returns network id, if the connect was
         successful, False otherwise. If assert_on_fail is True, no return value.
     """
-    _assert_on_fail_handler(_wifi_passpoint_connect,
-                            assert_on_fail,
-                            ad,
-                            passpoint_network,
-                            num_of_tries=num_of_tries)
+    _assert_on_fail_handler(
+        _wifi_passpoint_connect,
+        assert_on_fail,
+        ad,
+        passpoint_network,
+        num_of_tries=num_of_tries,
+    )
 
 
 def _wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1):
@@ -1823,27 +1923,35 @@
     ad.log.info("Starting connection process to passpoint %s", expected_ssid)
 
     try:
-        connect_result = _wait_for_connect_event(ad, expected_ssid,
-                                                 num_of_tries)
+        connect_result = _wait_for_connect_event(ad, expected_ssid, num_of_tries)
         asserts.assert_true(
-            connect_result, "Failed to connect to WiFi passpoint network %s on"
-            " %s" % (expected_ssid, ad.serial))
+            connect_result,
+            "Failed to connect to WiFi passpoint network %s on"
+            " %s" % (expected_ssid, ad.serial),
+        )
         ad.log.info("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
+        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
         asserts.assert_equal(
-            actual_ssid, expected_ssid,
-            "Connected to the wrong network on %s." % ad.serial)
+            actual_ssid,
+            expected_ssid,
+            "Connected to the wrong network on %s." % ad.serial,
+        )
         ad.log.info("Connected to Wi-Fi passpoint network %s.", actual_ssid)
 
         internet = validate_connection(ad, DEFAULT_PING_ADDR)
         if not internet:
-            raise signals.TestFailure("Failed to connect to internet on %s" %
-                                      expected_ssid)
+            raise signals.TestFailure(
+                "Failed to connect to internet on %s" % expected_ssid
+            )
     except Exception as error:
-        ad.log.error("Failed to connect to passpoint network %s with error %s",
-                     expected_ssid, error)
-        raise signals.TestFailure("Failed to connect to %s passpoint network" %
-                                  expected_ssid)
+        ad.log.error(
+            "Failed to connect to passpoint network %s with error %s",
+            expected_ssid,
+            error,
+        )
+        raise signals.TestFailure(
+            "Failed to connect to %s passpoint network" % expected_ssid
+        )
 
     finally:
         ad.droid.wifiStopTrackingStateChange()
@@ -1856,8 +1964,10 @@
         return True
     except Exception as error:
         ad.log.error(
-            "Failed to remove passpoint configuration with FQDN=%s "
-            "and error=%s", fqdn, error)
+            "Failed to remove passpoint configuration with FQDN=%s " "and error=%s",
+            fqdn,
+            error,
+        )
         return False
 
 
@@ -1874,7 +1984,7 @@
     idx = ad.droid.wifiScannerStartScan(scan_setting)
     event = ad.ed.pop_event("WifiScannerScan%sonSuccess" % idx, SHORT_TIMEOUT)
     ad.log.debug("Got event %s", event)
-    return event['data']
+    return event["data"]
 
 
 def track_connection(ad, network_ssid, check_connection_count):
@@ -1892,8 +2002,10 @@
     while check_connection_count > 0:
         connect_network = ad.ed.pop_event("WifiNetworkConnected", 120)
         ad.log.info("Connected to network %s", connect_network)
-        if (WifiEnums.SSID_KEY in connect_network['data'] and
-                connect_network['data'][WifiEnums.SSID_KEY] == network_ssid):
+        if (
+            WifiEnums.SSID_KEY in connect_network["data"]
+            and connect_network["data"][WifiEnums.SSID_KEY] == network_ssid
+        ):
             return True
         check_connection_count -= 1
     ad.droid.wifiStopTrackingStateChange()
@@ -1922,7 +2034,7 @@
     scan_time = len(scan_channels) * stime_channel
     for channel in scan_channels:
         if channel in WifiEnums.DFS_5G_FREQUENCIES:
-            scan_time += 132  #passive scan time on DFS
+            scan_time += 132  # passive scan time on DFS
     return scan_time, scan_channels
 
 
@@ -1937,10 +2049,10 @@
       If tracking started successfully, event data of success event is returned.
     """
     idx = ad.droid.wifiScannerStartTrackingBssids(
-        track_setting["bssidInfos"], track_setting["apLostThreshold"])
-    event = ad.ed.pop_event("WifiScannerBssid{}onSuccess".format(idx),
-                            SHORT_TIMEOUT)
-    return event['data']
+        track_setting["bssidInfos"], track_setting["apLostThreshold"]
+    )
+    event = ad.ed.pop_event("WifiScannerBssid{}onSuccess".format(idx), SHORT_TIMEOUT)
+    return event["data"]
 
 
 def convert_pem_key_to_pkcs8(in_file, out_file):
@@ -1956,17 +2068,16 @@
         filename.
     """
     asserts.assert_true(in_file.endswith(".pem"), "Input file has to be .pem.")
-    asserts.assert_true(out_file.endswith(".der"),
-                        "Output file has to be .der.")
-    cmd = ("openssl pkcs8 -inform PEM -in {} -outform DER -out {} -nocrypt"
-           " -topk8").format(in_file, out_file)
+    asserts.assert_true(out_file.endswith(".der"), "Output file has to be .der.")
+    cmd = (
+        "openssl pkcs8 -inform PEM -in {} -outform DER -out {} -nocrypt" " -topk8"
+    ).format(in_file, out_file)
     utils.exe_cmd(cmd)
 
 
-def validate_connection(ad,
-                        ping_addr=DEFAULT_PING_ADDR,
-                        wait_time=15,
-                        ping_gateway=True):
+def validate_connection(
+    ad, ping_addr=DEFAULT_PING_ADDR, wait_time=15, ping_gateway=True
+):
     """Validate internet connection by pinging the address provided.
 
     Args:
@@ -1981,7 +2092,9 @@
     # wait_time to allow for DHCP to complete.
     for i in range(wait_time):
         if ad.droid.connectivityNetworkIsConnected():
-            if (android_version > 10 and ad.droid.connectivityGetIPv4DefaultGateway()) or android_version < 11:
+            if (
+                android_version > 10 and ad.droid.connectivityGetIPv4DefaultGateway()
+            ) or android_version < 11:
                 break
         time.sleep(1)
     ping = False
@@ -1999,7 +2112,7 @@
     return ping
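
A hedged example of how callers typically gate on this helper (the 30 second
wait is an arbitrary illustration, not a project default):

    if not validate_connection(ad, wait_time=30, ping_gateway=True):
        raise signals.TestFailure("No internet connectivity after association")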
 
 
-#TODO(angli): This can only verify if an actual value is exactly the same.
+# TODO(angli): This can only verify if an actual value is exactly the same.
 # Would be nice to be able to verify an actual value is one of several.
 def verify_wifi_connection_info(ad, expected_con):
     """Verifies that the information of the currently connected wifi network is
@@ -2016,28 +2129,25 @@
         # Do not verify authentication related fields.
         if k == "password":
             continue
-        msg = "Field %s does not exist in wifi connection info %s." % (
-            k, current_con)
+        msg = "Field %s does not exist in wifi connection info %s." % (k, current_con)
         if k not in current_con:
             raise signals.TestFailure(msg)
         actual_v = current_con[k]
         if k in case_insensitive:
             actual_v = actual_v.lower()
             expected_v = expected_v.lower()
-        msg = "Expected %s to be %s, actual %s is %s." % (k, expected_v, k,
-                                                          actual_v)
+        msg = "Expected %s to be %s, actual %s is %s." % (k, expected_v, k, actual_v)
         if actual_v != expected_v:
             raise signals.TestFailure(msg)
 
 
-def check_autoconnect_to_open_network(
-        ad, conn_timeout=WIFI_CONNECTION_TIMEOUT_DEFAULT):
+def check_autoconnect_to_open_network(ad, conn_timeout=WIFI_CONNECTION_TIMEOUT_DEFAULT):
     """Connects to any open WiFI AP
-     Args:
-         timeout value in sec to wait for UE to connect to a WiFi AP
-     Returns:
-         True if UE connects to WiFi AP (supplicant_state = completed)
-         False if UE fails to complete connection within WIFI_CONNECTION_TIMEOUT time.
+    Args:
+        conn_timeout: timeout value in sec to wait for UE to connect to a WiFi AP
+    Returns:
+        True if UE connects to WiFi AP (supplicant_state = completed)
+        False if UE fails to complete connection within WIFI_CONNECTION_TIMEOUT time.
     """
     if ad.droid.wifiCheckState():
         return True
@@ -2045,8 +2155,7 @@
     wifi_connection_state = None
     timeout = time.time() + conn_timeout
     while wifi_connection_state != "completed":
-        wifi_connection_state = ad.droid.wifiGetConnectionInfo(
-        )['supplicant_state']
+        wifi_connection_state = ad.droid.wifiGetConnectionInfo()["supplicant_state"]
         if time.time() > timeout:
             ad.log.warning("Failed to connect to WiFi AP")
             return False
@@ -2070,8 +2179,10 @@
         phase2_types = [WifiEnums.EapPhase2.GTC, WifiEnums.EapPhase2.MSCHAPV2]
     for phase2_type in phase2_types:
         # Skip a special case for passpoint TTLS.
-        if (WifiEnums.Enterprise.FQDN in config
-                and phase2_type == WifiEnums.EapPhase2.GTC):
+        if (
+            WifiEnums.Enterprise.FQDN in config
+            and phase2_type == WifiEnums.EapPhase2.GTC
+        ):
             continue
         c = dict(config)
         c[WifiEnums.Enterprise.PHASE2] = phase2_type.value
@@ -2080,7 +2191,7 @@
 
 
 def generate_eap_test_name(config, ad=None):
-    """ Generates a test case name based on an EAP configuration.
+    """Generates a test case name based on an EAP configuration.
 
     Args:
         config: A dict representing an EAP credential.
@@ -2147,8 +2258,13 @@
         attn1.add(attenuators[2])
         attn1.add(attenuators[3])
     else:
-        asserts.fail(("Either two or four attenuators are required for this "
-                      "test, but found %s") % num_of_attns)
+        asserts.fail(
+            (
+                "Either two or four attenuators are required for this "
+                "test, but found %s"
+            )
+            % num_of_attns
+        )
     return [attn0, attn1]
 
 
@@ -2167,16 +2283,13 @@
         attenuator[2].set_atten(roaming_attn[attn_val_name][2])
         attenuator[3].set_atten(roaming_attn[attn_val_name][3])
     except:
-        logging.exception("Failed to set attenuation values %s.",
-                          attn_val_name)
+        logging.exception("Failed to set attenuation values %s.", attn_val_name)
         raise
 
 
-def set_attns_steps(attenuators,
-                    atten_val_name,
-                    roaming_attn=ROAMING_ATTN,
-                    steps=10,
-                    wait_time=12):
+def set_attns_steps(
+    attenuators, atten_val_name, roaming_attn=ROAMING_ATTN, steps=10, wait_time=12
+):
     """Set attenuation values on attenuators used in this test. It will change
     the attenuation values linearly from current value to target value step by
     step.
@@ -2189,8 +2302,11 @@
         steps: Number of attenuator changes to reach the target value.
         wait_time: Sleep time for each change of attenuator.
     """
-    logging.info("Set attenuation values to %s in %d step(s)",
-                 roaming_attn[atten_val_name], steps)
+    logging.info(
+        "Set attenuation values to %s in %d step(s)",
+        roaming_attn[atten_val_name],
+        steps,
+    )
     start_atten = [attenuator.get_atten() for attenuator in attenuators]
     target_atten = roaming_attn[atten_val_name]
     for current_step in range(steps):
@@ -2201,11 +2317,9 @@
         time.sleep(wait_time)
 
 
-def trigger_roaming_and_validate(dut,
-                                 attenuator,
-                                 attn_val_name,
-                                 expected_con,
-                                 roaming_attn=ROAMING_ATTN):
+def trigger_roaming_and_validate(
+    dut, attenuator, attn_val_name, expected_con, roaming_attn=ROAMING_ATTN
+):
     """Sets attenuators to trigger roaming and validate the DUT connected
     to the BSSID expected.
 
@@ -2225,8 +2339,7 @@
     expected_bssid = expected_con[WifiEnums.BSSID_KEY]
     logging.info("Roamed to %s successfully", expected_bssid)
     if not validate_connection(dut):
-        raise signals.TestFailure("Fail to connect to internet on %s" %
-                                  expected_bssid)
+        raise signals.TestFailure("Fail to connect to internet on %s" % expected_bssid)
 
 
 def create_softap_config():
@@ -2241,28 +2354,36 @@
     return config
 
 
-def wait_for_expected_number_of_softap_clients(ad, callbackId,
-                                               expected_num_of_softap_clients):
+def wait_for_expected_number_of_softap_clients(
+    ad, callbackId, expected_num_of_softap_clients
+):
     """Wait for the number of softap clients to be updated as expected.
     Args:
         callbackId: Id of the callback associated with registering.
         expected_num_of_softap_clients: expected number of softap clients.
     """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
-    clientData = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)['data']
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
+    )
+    clientData = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)["data"]
     clientCount = clientData[wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]
-    clientMacAddresses = clientData[
-        wifi_constants.SOFTAP_CLIENTS_MACS_CALLBACK_KEY]
+    clientMacAddresses = clientData[wifi_constants.SOFTAP_CLIENTS_MACS_CALLBACK_KEY]
     asserts.assert_equal(
-        clientCount, expected_num_of_softap_clients,
-        "The number of softap clients doesn't match the expected number")
+        clientCount,
+        expected_num_of_softap_clients,
+        "The number of softap clients doesn't match the expected number",
+    )
     asserts.assert_equal(
-        len(clientMacAddresses), expected_num_of_softap_clients,
-        "The number of mac addresses doesn't match the expected number")
+        len(clientMacAddresses),
+        expected_num_of_softap_clients,
+        "The number of mac addresses doesn't match the expected number",
+    )
     for macAddress in clientMacAddresses:
-        asserts.assert_true(checkMacAddress(macAddress),
-                            "An invalid mac address was returned")
+        asserts.assert_true(
+            checkMacAddress(macAddress), "An invalid mac address was returned"
+        )
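
Illustrative sketch of the callback-event naming scheme used above (assuming
`callbackId` was returned when the softap callback was registered); the event
name is rebuilt from the same constants before popping it from the dispatcher:

    event_name = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
    )
    data = ad.ed.pop_event(event_name, SHORT_TIMEOUT)["data"]
    macs = data[wifi_constants.SOFTAP_CLIENTS_MACS_CALLBACK_KEY]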
 
 
 def checkMacAddress(input):
@@ -2285,13 +2406,18 @@
         callbackId: Id of the callback associated with registering.
         expected_softap_state: The expected softap state.
     """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_STATE_CHANGED
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_STATE_CHANGED
+    )
     asserts.assert_equal(
-        ad.ed.pop_event(eventStr, SHORT_TIMEOUT)['data'][
-            wifi_constants.SOFTAP_STATE_CHANGE_CALLBACK_KEY],
+        ad.ed.pop_event(eventStr, SHORT_TIMEOUT)["data"][
+            wifi_constants.SOFTAP_STATE_CHANGE_CALLBACK_KEY
+        ],
         expected_softap_state,
-        "Softap state doesn't match with expected state")
+        "Softap state doesn't match with expected state",
+    )
 
 
 def get_current_number_of_softap_clients(ad, callbackId):
@@ -2303,12 +2429,16 @@
         If at least one callback event exists, returns the last updated
         number of softap clients; returns None when no matching event is queued.
     """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
+    )
     events = ad.ed.pop_all(eventStr)
     for event in events:
-        num_of_clients = event['data'][
-            wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]
+        num_of_clients = event["data"][
+            wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY
+        ]
     if len(events) == 0:
         return None
     return num_of_clients
@@ -2322,30 +2452,32 @@
     Returns:
         Returns last updated information of softap.
     """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_INFO_CHANGED
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_INFO_CHANGED
+    )
     ad.log.debug("softap info dump from eventStr %s", eventStr)
     frequency = 0
     bandwidth = 0
-    if (need_to_wait):
+    if need_to_wait:
         event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        frequency = event['data'][
-            wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = event['data'][
-            wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-        ad.log.info("softap info updated, frequency is %s, bandwidth is %s",
-                    frequency, bandwidth)
+        frequency = event["data"][wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
+        bandwidth = event["data"][wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
+        ad.log.info(
+            "softap info updated, frequency is %s, bandwidth is %s",
+            frequency,
+            bandwidth,
+        )
 
     events = ad.ed.pop_all(eventStr)
     for event in events:
-        frequency = event['data'][
-            wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = event['data'][
-            wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-    ad.log.info("softap info, frequency is %s, bandwidth is %s", frequency,
-                bandwidth)
+        frequency = event["data"][wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
+        bandwidth = event["data"][wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
+    ad.log.info("softap info, frequency is %s, bandwidth is %s", frequency, bandwidth)
     return frequency, bandwidth
 
+
 def get_current_softap_infos(ad, callbackId, need_to_wait):
     """pop up all of softap info list changed event from queue.
     Args:
@@ -2354,33 +2486,37 @@
     Returns:
         Returns the last updated information of the softap.
     """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_INFOLIST_CHANGED
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_INFOLIST_CHANGED
+    )
     ad.log.debug("softap info dump from eventStr %s", eventStr)
 
-    if (need_to_wait):
+    if need_to_wait:
         event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        infos = event['data']
+        infos = event["data"]
 
     events = ad.ed.pop_all(eventStr)
     for event in events:
-        infos = event['data']
+        infos = event["data"]
 
     for info in infos:
-        frequency = info[
-            wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = info[
-            wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-        wifistandard = info[
-            wifi_constants.SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY]
-        bssid = info[
-            wifi_constants.SOFTAP_INFO_BSSID_CALLBACK_KEY]
+        frequency = info[wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
+        bandwidth = info[wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
+        wifistandard = info[wifi_constants.SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY]
+        bssid = info[wifi_constants.SOFTAP_INFO_BSSID_CALLBACK_KEY]
         ad.log.info(
-                "softap info, freq:%s, bw:%s, wifistandard:%s, bssid:%s",
-                frequency, bandwidth, wifistandard, bssid)
+            "softap info, freq:%s, bw:%s, wifistandard:%s, bssid:%s",
+            frequency,
+            bandwidth,
+            wifistandard,
+            bssid,
+        )
 
     return infos
 
+
 def get_current_softap_capability(ad, callbackId, need_to_wait):
     """pop up all of softap info list changed event from queue.
     Args:
@@ -2389,19 +2525,23 @@
     Returns:
         Returns last updated capability of softap.
     """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-            callbackId) + wifi_constants.SOFTAP_CAPABILITY_CHANGED
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_CAPABILITY_CHANGED
+    )
     ad.log.debug("softap capability dump from eventStr %s", eventStr)
-    if (need_to_wait):
+    if need_to_wait:
         event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        capability = event['data']
+        capability = event["data"]
 
     events = ad.ed.pop_all(eventStr)
     for event in events:
-        capability = event['data']
+        capability = event["data"]
 
     return capability
 
+
 def get_ssrdumps(ad):
     """Pulls dumps in the ssrdump dir
     Args:
@@ -2413,8 +2553,7 @@
         log_path = os.path.join(ad.device_log_path, "SSRDUMPS_%s" % ad.serial)
         os.makedirs(log_path, exist_ok=True)
         ad.pull_files(logs, log_path)
-    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete",
-                 ignore_status=True)
+    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete", ignore_status=True)
 
 
 def start_pcap(pcap, wifi_band, test_name):
@@ -2430,9 +2569,10 @@
         (pcap Process object, log directory) as the value
     """
     log_dir = os.path.join(
-        context.get_current_context().get_full_output_path(), 'PacketCapture')
+        context.get_current_context().get_full_output_path(), "PacketCapture"
+    )
     os.makedirs(log_dir, exist_ok=True)
-    if wifi_band == 'dual':
+    if wifi_band == "dual":
         bands = [BAND_2G, BAND_5G]
     else:
         bands = [wifi_band]
@@ -2473,8 +2613,10 @@
     for pkt in packets:
         logging.debug("Packet Summary = %s", pkt.summary())
         if mac in pkt.summary():
-            asserts.fail("Device %s caught Factory MAC: %s in packet sniffer."
-                         "Packet = %s" % (ad.serial, mac, pkt.show()))
+            asserts.fail(
+                "Device %s caught Factory MAC: %s in packet sniffer."
+                "Packet = %s" % (ad.serial, mac, pkt.show())
+            )
 
 
 def verify_mac_is_found_in_pcap(ad, mac, packets):
@@ -2488,8 +2630,9 @@
     for pkt in packets:
         if mac in pkt.summary():
             return
-    asserts.fail("Did not find MAC = %s in packet sniffer."
-                 "for device %s" % (mac, ad.serial))
+    asserts.fail(
+        "Did not find MAC = %s in packet sniffer." "for device %s" % (mac, ad.serial)
+    )
 
 
 def start_cnss_diags(ads, cnss_diag_file, pixel_models):
@@ -2512,14 +2655,17 @@
         prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
     else:
         prop = wifi_constants.CNSS_DIAG_PROP
-    if ad.adb.getprop(prop) != 'true':
+    if ad.adb.getprop(prop) != "true":
         if not int(
-                ad.adb.shell("ls -l %s%s | wc -l" %
-                             (CNSS_DIAG_CONFIG_PATH, CNSS_DIAG_CONFIG_FILE))):
+            ad.adb.shell(
+                "ls -l %s%s | wc -l" % (CNSS_DIAG_CONFIG_PATH, CNSS_DIAG_CONFIG_FILE)
+            )
+        ):
             ad.adb.push("%s %s" % (cnss_diag_file, CNSS_DIAG_CONFIG_PATH))
         ad.adb.shell(
             "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
-            ignore_status=True)
+            ignore_status=True,
+        )
         ad.adb.shell("setprop %s true" % prop, ignore_status=True)
 
 
@@ -2559,8 +2705,8 @@
 
 
 LinkProbeResult = namedtuple(
-    'LinkProbeResult',
-    ('is_success', 'stdout', 'elapsed_time', 'failure_reason'))
+    "LinkProbeResult", ("is_success", "stdout", "elapsed_time", "failure_reason")
+)
 
 
 def send_link_probe(ad):
@@ -2572,27 +2718,33 @@
     Returns:
         LinkProbeResult namedtuple
     """
-    stdout = ad.adb.shell('cmd wifi send-link-probe')
-    asserts.assert_false('Error' in stdout or 'Exception' in stdout,
-                         'Exception while sending link probe: ' + stdout)
+    stdout = ad.adb.shell("cmd wifi send-link-probe")
+    asserts.assert_false(
+        "Error" in stdout or "Exception" in stdout,
+        "Exception while sending link probe: " + stdout,
+    )
 
     is_success = False
     elapsed_time = None
     failure_reason = None
-    if 'succeeded' in stdout:
+    if "succeeded" in stdout:
         is_success = True
         elapsed_time = next(
-            (int(token) for token in stdout.split() if token.isdigit()), None)
-    elif 'failed with reason' in stdout:
+            (int(token) for token in stdout.split() if token.isdigit()), None
+        )
+    elif "failed with reason" in stdout:
         failure_reason = next(
-            (int(token) for token in stdout.split() if token.isdigit()), None)
+            (int(token) for token in stdout.split() if token.isdigit()), None
+        )
     else:
-        asserts.fail('Unexpected link probe result: ' + stdout)
+        asserts.fail("Unexpected link probe result: " + stdout)
 
-    return LinkProbeResult(is_success=is_success,
-                           stdout=stdout,
-                           elapsed_time=elapsed_time,
-                           failure_reason=failure_reason)
+    return LinkProbeResult(
+        is_success=is_success,
+        stdout=stdout,
+        elapsed_time=elapsed_time,
+        failure_reason=failure_reason,
+    )
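
A short usage sketch (illustrative only) for aggregating per-probe results from
send_link_probes(), defined just below:

    results = send_link_probes(ad, num_probes=5, delay_sec=1)
    ok = [r.elapsed_time for r in results if r.is_success]
    failed = [r.failure_reason for r in results if not r.is_success]
    logging.info("link probes: %d ok, %d failed", len(ok), len(failed))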
 
 
 def send_link_probes(ad, num_probes, delay_sec):
@@ -2606,13 +2758,13 @@
     Returns:
         List[LinkProbeResult] one LinkProbeResults for each probe
     """
-    logging.info('Sending link probes')
+    logging.info("Sending link probes")
     results = []
     for _ in range(num_probes):
         # send_link_probe() will also fail the test if it sees an exception
         # in the stdout of the adb shell command
         result = send_link_probe(ad)
-        logging.info('link probe results: ' + str(result))
+        logging.info("link probe results: " + str(result))
         results.append(result)
         time.sleep(delay_sec)
 
@@ -2622,17 +2774,17 @@
 def ap_setup(test, index, ap, network, bandwidth=80, channel=6):
     """Set up the AP with provided network info.
 
-        Args:
-            test: the calling test class object.
-            index: int, index of the AP.
-            ap: access_point object of the AP.
-            network: dict with information of the network, including ssid,
-                     password and bssid.
-            bandwidth: the operation bandwidth for the AP, default 80MHz.
-            channel: the channel number for the AP.
-        Returns:
-            brconfigs: the bridge interface configs
-        """
+    Args:
+        test: the calling test class object.
+        index: int, index of the AP.
+        ap: access_point object of the AP.
+        network: dict with information of the network, including ssid,
+                 password and bssid.
+        bandwidth: the operation bandwidth for the AP, default 80MHz.
+        channel: the channel number for the AP.
+    Returns:
+        brconfigs: the bridge interface configs
+    """
     bss_settings = []
     ssid = network[WifiEnums.SSID_KEY]
     test.access_points[index].close()
@@ -2641,18 +2793,19 @@
     # Configure AP as required.
     if "password" in network.keys():
         password = network["password"]
-        security = hostapd_security.Security(security_mode="wpa",
-                                             password=password)
+        security = hostapd_security.Security(security_mode="wpa", password=password)
     else:
         security = hostapd_security.Security(security_mode=None, password=None)
-    config = hostapd_ap_preset.create_ap_preset(channel=channel,
-                                                ssid=ssid,
-                                                security=security,
-                                                bss_settings=bss_settings,
-                                                vht_bandwidth=bandwidth,
-                                                profile_name='whirlwind',
-                                                iface_wlan_2g=ap.wlan_2g,
-                                                iface_wlan_5g=ap.wlan_5g)
+    config = hostapd_ap_preset.create_ap_preset(
+        channel=channel,
+        ssid=ssid,
+        security=security,
+        bss_settings=bss_settings,
+        vht_bandwidth=bandwidth,
+        profile_name="whirlwind",
+        iface_wlan_2g=ap.wlan_2g,
+        iface_wlan_5g=ap.wlan_5g,
+    )
     ap.start_ap(config)
     logging.info("AP started on channel {} with SSID {}".format(channel, ssid))
 
@@ -2663,14 +2816,14 @@
         test: The test class object.
         AP: int, indicating which AP to turn OFF.
     """
-    hostapd_2g = test.access_points[AP - 1]._aps['wlan0'].hostapd
+    hostapd_2g = test.access_points[AP - 1]._aps["wlan0"].hostapd
     if hostapd_2g.is_alive():
         hostapd_2g.stop()
-        logging.debug('Turned WLAN0 AP%d off' % AP)
-    hostapd_5g = test.access_points[AP - 1]._aps['wlan1'].hostapd
+        logging.debug("Turned WLAN0 AP%d off" % AP)
+    hostapd_5g = test.access_points[AP - 1]._aps["wlan1"].hostapd
     if hostapd_5g.is_alive():
         hostapd_5g.stop()
-        logging.debug('Turned WLAN1 AP%d off' % AP)
+        logging.debug("Turned WLAN1 AP%d off" % AP)
 
 
 def turn_ap_on(test, AP):
@@ -2679,14 +2832,14 @@
         test: The test class object.
         AP: int, indicating which AP to turn ON.
     """
-    hostapd_2g = test.access_points[AP - 1]._aps['wlan0'].hostapd
+    hostapd_2g = test.access_points[AP - 1]._aps["wlan0"].hostapd
     if not hostapd_2g.is_alive():
         hostapd_2g.start(hostapd_2g.config)
-        logging.debug('Turned WLAN0 AP%d on' % AP)
-    hostapd_5g = test.access_points[AP - 1]._aps['wlan1'].hostapd
+        logging.debug("Turned WLAN0 AP%d on" % AP)
+    hostapd_5g = test.access_points[AP - 1]._aps["wlan1"].hostapd
     if not hostapd_5g.is_alive():
         hostapd_5g.start(hostapd_5g.config)
-        logging.debug('Turned WLAN1 AP%d on' % AP)
+        logging.debug("Turned WLAN1 AP%d on" % AP)
 
 
 def turn_location_off_and_scan_toggle_off(ad):
@@ -2697,8 +2850,8 @@
     asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
 
 
-def set_softap_channel(dut, ap_iface='wlan1', cs_count=10, channel=2462):
-    """ Set SoftAP mode channel
+def set_softap_channel(dut, ap_iface="wlan1", cs_count=10, channel=2462):
+    """Set SoftAP mode channel
 
     Args:
         dut: android device object
@@ -2706,26 +2859,28 @@
         cs_count: how many beacon frames before switch channel, default = 10
         channel: a wifi channel.
     """
-    chan_switch_cmd = 'hostapd_cli -i {} chan_switch {} {}'
+    chan_switch_cmd = "hostapd_cli -i {} chan_switch {} {}"
     chan_switch_cmd_show = chan_switch_cmd.format(ap_iface, cs_count, channel)
-    dut.log.info('adb shell {}'.format(chan_switch_cmd_show))
+    dut.log.info("adb shell {}".format(chan_switch_cmd_show))
     chan_switch_result = dut.adb.shell(
-        chan_switch_cmd.format(ap_iface, cs_count, channel))
-    if chan_switch_result == 'OK':
-        dut.log.info('switch hotspot channel to {}'.format(channel))
+        chan_switch_cmd.format(ap_iface, cs_count, channel)
+    )
+    if chan_switch_result == "OK":
+        dut.log.info("switch hotspot channel to {}".format(channel))
         return chan_switch_result
 
     asserts.fail("Failed to switch hotspot channel")
 
+
 def get_wlan0_link(dut):
-    """ get wlan0 interface status"""
-    get_wlan0 = 'wpa_cli -iwlan0 -g@android:wpa_wlan0 IFNAME=wlan0 status'
+    """get wlan0 interface status"""
+    get_wlan0 = "wpa_cli -iwlan0 -g@android:wpa_wlan0 IFNAME=wlan0 status"
     out = dut.adb.shell(get_wlan0)
     out = dict(re.findall(r'(\S+)=(".*?"|\S+)', out))
-    asserts.assert_true("ssid" in out,
-                        "Client doesn't connect to any network")
+    asserts.assert_true("ssid" in out, "Client doesn't connect to any network")
     return out
 
+
 def verify_11ax_wifi_connection(ad, wifi6_supported_models, wifi6_ap):
     """Verify 11ax for wifi connection.
 
@@ -2737,8 +2892,10 @@
     if wifi6_ap and ad.model in wifi6_supported_models:
         logging.info("Verifying 11ax. Model: %s" % ad.model)
         asserts.assert_true(
-            ad.droid.wifiGetConnectionStandard() ==
-            wifi_constants.WIFI_STANDARD_11AX, "DUT did not connect to 11ax.")
+            ad.droid.wifiGetConnectionStandard() == wifi_constants.WIFI_STANDARD_11AX,
+            "DUT did not connect to 11ax.",
+        )
+
 
 def verify_11ax_softap(dut, dut_client, wifi6_supported_models):
     """Verify 11ax SoftAp if devices support it.
@@ -2751,14 +2908,21 @@
       dut_client: Client connecting to softap.
       wifi6_supported_models: List of device models supporting 11ax.
     """
-    if dut.model in wifi6_supported_models and dut_client.model in wifi6_supported_models:
+    if (
+        dut.model in wifi6_supported_models
+        and dut_client.model in wifi6_supported_models
+    ):
         logging.info(
             "Verifying 11ax softap. DUT model: %s, DUT Client model: %s",
-            dut.model, dut_client.model)
+            dut.model,
+            dut_client.model,
+        )
         asserts.assert_true(
-            dut_client.droid.wifiGetConnectionStandard() ==
-            wifi_constants.WIFI_STANDARD_11AX,
-            "DUT failed to start SoftAp in 11ax.")
+            dut_client.droid.wifiGetConnectionStandard()
+            == wifi_constants.WIFI_STANDARD_11AX,
+            "DUT failed to start SoftAp in 11ax.",
+        )
+
 
 def check_available_channels_in_bands_2_5(dut, country_code):
     """Check if DUT is capable of enable BridgedAp.
@@ -2782,10 +2946,10 @@
     capability = get_current_softap_capability(dut, callbackId, True)
     dut.droid.unregisterSoftApCallback(callbackId)
 
-    if capability[wifi_constants.
-                  SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST] and \
-        capability[wifi_constants.
-                   SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST]:
+    if (
+        capability[wifi_constants.SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST]
+        and capability[wifi_constants.SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST]
+    ):
         return True
     return False
 
@@ -2802,20 +2966,21 @@
     dut1_ip = ""
     dut2_ip = ""
     try:
-        dut1_ip = dut1.droid.connectivityGetIPv4Addresses('wlan0')[0]
+        dut1_ip = dut1.droid.connectivityGetIPv4Addresses("wlan0")[0]
     except IndexError as e:
         dut1.log.info(
-            "{} has no Wi-Fi connection, cannot get IPv4 address."
-            .format(dut1.serial))
+            "{} has no Wi-Fi connection, cannot get IPv4 address.".format(dut1.serial)
+        )
     try:
-        dut2_ip = dut2.droid.connectivityGetIPv4Addresses('wlan0')[0]
+        dut2_ip = dut2.droid.connectivityGetIPv4Addresses("wlan0")[0]
     except IndexError as e:
         dut2.log.info(
-            "{} has no Wi-Fi connection, cannot get IPv4 address."
-            .format(dut2.serial))
+            "{} has no Wi-Fi connection, cannot get IPv4 address.".format(dut2.serial)
+        )
     # Test fail if not able to obtain two DUT's IPv4 addresses.
-    asserts.assert_true(dut1_ip and dut2_ip,
-                        "Ping failed because no DUT's IPv4 address")
+    asserts.assert_true(
+        dut1_ip and dut2_ip, "Ping failed because a DUT has no IPv4 address"
+    )
 
     dut1.log.info("{} IPv4 addresses : {}".format(dut1.serial, dut1_ip))
     dut2.log.info("{} IPv4 addresses : {}".format(dut2.serial, dut2_ip))
@@ -2823,13 +2988,12 @@
     # Two clients ping each other
     dut1.log.info("{} ping {}".format(dut1_ip, dut2_ip))
     asserts.assert_true(
-        utils.adb_shell_ping(dut1, count=10, dest_ip=dut2_ip,
-                             timeout=20),
-        "%s ping %s failed" % (dut1.serial, dut2_ip))
+        utils.adb_shell_ping(dut1, count=10, dest_ip=dut2_ip, timeout=20),
+        "%s ping %s failed" % (dut1.serial, dut2_ip),
+    )
 
     dut2.log.info("{} ping {}".format(dut2_ip, dut1_ip))
     asserts.assert_true(
-        utils.adb_shell_ping(dut2, count=10, dest_ip=dut1_ip,
-                             timeout=20),
-        "%s ping %s failed" % (dut2.serial, dut1_ip))
-
+        utils.adb_shell_ping(dut2, count=10, dest_ip=dut1_ip, timeout=20),
+        "%s ping %s failed" % (dut2.serial, dut1_ip),
+    )
diff --git a/src/antlion/test_utils_tests/__init__.py b/src/antlion/test_utils_tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/acts_import_test_utils_test.py b/src/antlion/test_utils_tests/acts_import_test_utils_test.py
deleted file mode 100755
index 4e2ce49..0000000
--- a/src/antlion/test_utils_tests/acts_import_test_utils_test.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class ActsImportTestUtilsTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under acts_contrib.test_utils.*
-    """
-
-    def test_import_successful(self):
-        """ Test to return true if all imports were successful.
-
-        This test will fail if any import was unsuccessful.
-        """
-        try:
-            from antlion import utils
-
-            from antlion.test_utils.bt import BleEnum
-            from antlion.test_utils.bt import BluetoothBaseTest
-            from antlion.test_utils.bt import BluetoothCarHfpBaseTest
-            from antlion.test_utils.bt import BtEnum
-            from antlion.test_utils.bt import GattConnectedBaseTest
-            from antlion.test_utils.bt import GattEnum
-            from antlion.test_utils.bt import bt_contacts_utils
-            from antlion.test_utils.bt import bt_gatt_utils
-            from antlion.test_utils.bt import bt_test_utils
-            from antlion.test_utils.bt import native_bt_test_utils
-
-            from antlion.test_utils.net import connectivity_const
-            from antlion.test_utils.net import connectivity_const
-
-            from antlion.test_utils.wifi import wifi_constants
-            from antlion.test_utils.wifi import wifi_test_utils
-
-        except Exception:
-            self.fail('Unable to import all supported test_utils')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/audio_analysis_integrationtest.py b/src/antlion/test_utils_tests/audio_analysis_integrationtest.py
deleted file mode 100644
index e7c17a2..0000000
--- a/src/antlion/test_utils_tests/audio_analysis_integrationtest.py
+++ /dev/null
@@ -1,360 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Note: This test has been labelled as an integration test due to its use of
-# real data, and the five to six second execution time.
-import logging
-import numpy
-import os
-import unittest
-
-import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis
-import antlion.test_utils.audio_analysis_lib.audio_data as audio_data
-
-
-class SpectralAnalysisTest(unittest.TestCase):
-    def setUp(self):
-        """Uses the same seed to generate noise for each test."""
-        numpy.random.seed(0)
-
-    def dummy_peak_detection(self, array, window_size):
-        """Detects peaks in an array in simple way.
-
-        A point (i, array[i]) is a peak if array[i] is the maximum among
-        array[i - half_window_size] to array[i + half_window_size].
-        If array[i - half_window_size] to array[i + half_window_size] are all
-        equal, then there is no peak in this window.
-
-        Args:
-            array: The input array to detect peaks in. Array is a list of
-                absolute values of the magnitude of transformed coefficient.
-            window_size: The window to detect peaks.
-
-        Returns:
-            A list of tuples:
-                [(peak_index_1, peak_value_1), (peak_index_2, peak_value_2),
-                ...]
-                where the tuples are sorted by peak values.
-
-        """
-        half_window_size = window_size / 2
-        length = len(array)
-
-        def mid_is_peak(array, mid, left, right):
-            """Checks if value at mid is the largest among left to right.
-
-            Args:
-                array: A list of numbers.
-                mid: The mid index.
-                left: The left index.
-                rigth: The right index.
-
-            Returns:
-                True if array[index] is the maximum among numbers in array
-                    between index [left, right] inclusively.
-
-            """
-            value_mid = array[int(mid)]
-            for index in range(int(left), int(right) + 1):
-                if index == mid:
-                    continue
-                if array[index] >= value_mid:
-                    return False
-            return True
-
-        results = []
-        for mid in range(length):
-            left = max(0, mid - half_window_size)
-            right = min(length - 1, mid + half_window_size)
-            if mid_is_peak(array, mid, left, right):
-                results.append((mid, array[int(mid)]))
-
-        # Sort the peaks by values.
-        return sorted(results, key=lambda x: x[1], reverse=True)
-
-    def test_peak_detection(self):
-        array = [0, 1, 2, 3, 4, 3, 2, 1, 0, 1, 2, 3, 5, 3, 2, 1, 1, 1, 1, 1]
-        result = audio_analysis.peak_detection(array, 4)
-        golden_answer = [(12, 5), (4, 4)]
-        self.assertEqual(result, golden_answer)
-
-    def test_peak_detection_large(self):
-        array = numpy.random.uniform(0, 1, 1000000)
-        window_size = 100
-        logging.debug('Test large array using dummy peak detection')
-        dummy_answer = self.dummy_peak_detection(array, window_size)
-        logging.debug('Test large array using improved peak detection')
-        improved_answer = audio_analysis.peak_detection(array, window_size)
-        logging.debug('Compare the result')
-        self.assertEqual(dummy_answer, improved_answer)
-
-    def test_spectral_analysis(self):
-        rate = 48000
-        length_in_secs = 0.5
-        freq_1 = 490.0
-        freq_2 = 60.0
-        coeff_1 = 1
-        coeff_2 = 0.3
-        samples = length_in_secs * rate
-        noise = numpy.random.standard_normal(int(samples)) * 0.005
-        x = numpy.linspace(0.0, (samples - 1) * 1.0 / rate, samples)
-        y = (coeff_1 * numpy.sin(freq_1 * 2.0 * numpy.pi * x) + coeff_2 *
-             numpy.sin(freq_2 * 2.0 * numpy.pi * x)) + noise
-        results = audio_analysis.spectral_analysis(y, rate)
-        # Results should contains
-        # [(490, 1*k), (60, 0.3*k), (0, 0.1*k)] where 490Hz is the dominant
-        # frequency with coefficient 1, 60Hz is the second dominant frequency
-        # with coefficient 0.3, 0Hz is from Gaussian noise with coefficient
-        # around 0.1. The k constant is resulted from window function.
-        logging.debug('Results: %s', results)
-        self.assertTrue(abs(results[0][0] - freq_1) < 1)
-        self.assertTrue(abs(results[1][0] - freq_2) < 1)
-        self.assertTrue(
-            abs(results[0][1] / results[1][1] - coeff_1 / coeff_2) < 0.01)
-
-    def test_spectral_snalysis_real_data(self):
-        """This unittest checks the spectral analysis works on real data."""
-        file_path = os.path.join(
-            os.path.dirname(__file__), '../../../acts/framework/tests/test_data', '1k_2k.raw')
-        binary = open(file_path, 'rb').read()
-        data = audio_data.AudioRawData(binary, 2, 'S32_LE')
-        saturate_value = audio_data.get_maximum_value_from_sample_format(
-            'S32_LE')
-        golden_frequency = [1000, 2000]
-        for channel in [0, 1]:
-            normalized_signal = audio_analysis.normalize_signal(
-                data.channel_data[channel], saturate_value)
-            spectral = audio_analysis.spectral_analysis(normalized_signal,
-                                                        48000, 0.02)
-            logging.debug('channel %s: %s', channel, spectral)
-            self.assertTrue(
-                abs(spectral[0][0] - golden_frequency[channel]) < 5,
-                'Dominant frequency is not correct')
-
-    def test_not_meaningful_data(self):
-        """Checks that sepectral analysis handles un-meaningful data."""
-        rate = 48000
-        length_in_secs = 0.5
-        samples = length_in_secs * rate
-        noise_amplitude = audio_analysis.MEANINGFUL_RMS_THRESHOLD * 0.5
-        noise = numpy.random.standard_normal(int(samples)) * noise_amplitude
-        results = audio_analysis.spectral_analysis(noise, rate)
-        self.assertEqual([(0, 0)], results)
-
-    def testEmptyData(self):
-        """Checks that sepectral analysis rejects empty data."""
-        with self.assertRaises(audio_analysis.EmptyDataError):
-            results = audio_analysis.spectral_analysis([], 100)
-
-
-class NormalizeTest(unittest.TestCase):
-    def test_normalize(self):
-        y = [1, 2, 3, 4, 5]
-        normalized_y = audio_analysis.normalize_signal(y, 10)
-        expected = numpy.array([0.1, 0.2, 0.3, 0.4, 0.5])
-        for i in range(len(y)):
-            self.assertEqual(expected[i], normalized_y[i])
-
-
-class AnomalyTest(unittest.TestCase):
-    def setUp(self):
-        """Creates a test signal of sine wave."""
-        # Use the same seed for each test case.
-        numpy.random.seed(0)
-
-        self.block_size = 120
-        self.rate = 48000
-        self.freq = 440
-        length_in_secs = 0.25
-        self.samples = length_in_secs * self.rate
-        x = numpy.linspace(0.0, (self.samples - 1) * 1.0 / self.rate,
-                           self.samples)
-        self.y = numpy.sin(self.freq * 2.0 * numpy.pi * x)
-
-    def add_noise(self):
-        """Add noise to the test signal."""
-        noise_amplitude = 0.3
-        noise = numpy.random.standard_normal(len(self.y)) * noise_amplitude
-        self.y = self.y + noise
-
-    def insert_anomaly(self):
-        """Inserts an anomaly to the test signal.
-
-        The anomaly self.anomaly_samples should be created before calling this
-        method.
-
-        """
-        self.anomaly_start_secs = 0.1
-        self.y = numpy.insert(self.y,
-                              int(self.anomaly_start_secs * self.rate),
-                              self.anomaly_samples)
-
-    def generate_skip_anomaly(self):
-        """Skips a section of test signal."""
-        self.anomaly_start_secs = 0.1
-        self.anomaly_duration_secs = 0.005
-        anomaly_append_secs = self.anomaly_start_secs + self.anomaly_duration_secs
-        anomaly_start_index = self.anomaly_start_secs * self.rate
-        anomaly_append_index = anomaly_append_secs * self.rate
-        self.y = numpy.append(self.y[:int(anomaly_start_index)],
-                              self.y[int(anomaly_append_index):])
-
-    def create_constant_anomaly(self, amplitude):
-        """Creates an anomaly of constant samples.
-
-        Args:
-            amplitude: The amplitude of the constant samples.
-
-        """
-        self.anomaly_duration_secs = 0.005
-        self.anomaly_samples = ([amplitude] *
-                                int(self.anomaly_duration_secs * self.rate))
-
-    def run_analysis(self):
-        """Runs the anomaly detection."""
-        self.results = audio_analysis.anomaly_detection(
-            self.y, self.rate, self.freq, self.block_size)
-        logging.debug('Results: %s', self.results)
-
-    def check_no_anomaly(self):
-        """Verifies that there is no anomaly in detection result."""
-        self.run_analysis()
-        self.assertFalse(self.results)
-
-    def check_anomaly(self):
-        """Verifies that there is anomaly in detection result.
-
-        The detection result should contain anomaly time stamps that are
-        close to where anomaly was inserted. There can be multiple anomalies
-        since the detection depends on the block size.
-
-        """
-        self.run_analysis()
-        self.assertTrue(self.results)
-        # Anomaly can be detected as long as the detection window of block size
-        # overlaps with anomaly.
-        expected_detected_range_secs = (
-            self.anomaly_start_secs - float(self.block_size) / self.rate,
-            self.anomaly_start_secs + self.anomaly_duration_secs)
-        for detected_secs in self.results:
-            self.assertTrue(detected_secs <= expected_detected_range_secs[1])
-            self.assertTrue(detected_secs >= expected_detected_range_secs[0])
-
-    def test_good_signal(self):
-        """Sine wave signal with no noise or anomaly."""
-        self.check_no_anomaly()
-
-    def test_good_signal_noise(self):
-        """Sine wave signal with noise."""
-        self.add_noise()
-        self.check_no_anomaly()
-
-    def test_zero_anomaly(self):
-        """Sine wave signal with no noise but with anomaly.
-
-        This test case simulates underrun in digital data where there will be
-        one block of samples with 0 amplitude.
-
-        """
-        self.create_constant_anomaly(0)
-        self.insert_anomaly()
-        self.check_anomaly()
-
-    def test_zero_anomaly_noise(self):
-        """Sine wave signal with noise and anomaly.
-
-        This test case simulates underrun in analog data where there will be
-        one block of samples with amplitudes close to 0.
-
-        """
-        self.create_constant_anomaly(0)
-        self.insert_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_low_constant_anomaly(self):
-        """Sine wave signal with low constant anomaly.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(0.05)
-        self.insert_anomaly()
-        self.check_anomaly()
-
-    def test_low_constant_anomaly_noise(self):
-        """Sine wave signal with low constant anomaly and noise.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(0.05)
-        self.insert_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_high_constant_anomaly(self):
-        """Sine wave signal with high constant anomaly.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(2)
-        self.insert_anomaly()
-        self.check_anomaly()
-
-    def test_high_constant_anomaly_noise(self):
-        """Sine wave signal with high constant anomaly and noise.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(2)
-        self.insert_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_skipped_anomaly(self):
-        """Sine wave signal with skipped anomaly.
-
-        The anomaly simulates the symptom where a block is skipped.
-
-        """
-        self.generate_skip_anomaly()
-        self.check_anomaly()
-
-    def test_skipped_anomaly_noise(self):
-        """Sine wave signal with skipped anomaly with noise.
-
-        The anomaly simulates the symptom where a block is skipped.
-
-        """
-        self.generate_skip_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_empty_data(self):
-        """Checks that anomaly detection rejects empty data."""
-        self.y = []
-        with self.assertRaises(audio_analysis.EmptyDataError):
-            self.check_anomaly()
-
-
-if __name__ == '__main__':
-    logging.basicConfig(
-        level=logging.DEBUG,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/audio_quality_measurement_integrationtest.py b/src/antlion/test_utils_tests/audio_quality_measurement_integrationtest.py
deleted file mode 100644
index 824bd60..0000000
--- a/src/antlion/test_utils_tests/audio_quality_measurement_integrationtest.py
+++ /dev/null
@@ -1,267 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Note: This test has been labelled as an integration test due to its use of
-# real data, and the 12+ second execution time. It also generates sine waves
-# during the test, rather than using data that has been pre-calculated.
-
-import math
-import numpy
-import unittest
-
-import antlion.test_utils.audio_analysis_lib.audio_quality_measurement as audio_quality_measurement
-
-
-class NoiseLevelTest(unittest.TestCase):
-    def setUp(self):
-        """Uses the same seed to generate noise for each test."""
-        numpy.random.seed(0)
-
-    def test_noise_level(self):
-        # Generates the standard sin wave with standard_noise portion of noise.
-        rate = 48000
-        length_in_secs = 2
-        frequency = 440
-        amplitude = 1
-        standard_noise = 0.05
-
-        wave = []
-        for index in range(0, rate * length_in_secs):
-            phase = 2.0 * math.pi * frequency * float(index) / float(rate)
-            sine_wave = math.sin(phase)
-            noise = standard_noise * numpy.random.standard_normal()
-            wave.append(float(amplitude) * (sine_wave + noise))
-
-        # Calculates the average value after applying teager operator.
-        teager_value_of_wave, length = 0, len(wave)
-        for i in range(1, length - 1):
-            ith_teager_value = abs(wave[i] * wave[i] - wave[i - 1] * wave[i +
-                                                                          1])
-            ith_teager_value *= max(1, abs(wave[i]))
-            teager_value_of_wave += ith_teager_value
-        teager_value_of_wave /= float(length * (amplitude**2))
-
-        noise = audio_quality_measurement.noise_level(
-            amplitude, frequency, rate, teager_value_of_wave)
-
-        self.assertTrue(abs(noise - standard_noise) < 0.01)
-
-
-class ErrorTest(unittest.TestCase):
-    def test_error(self):
-        value1 = [0.2, 0.4, 0.1, 0.01, 0.01, 0.01]
-        value2 = [0.3, 0.3, 0.08, 0.0095, 0.0098, 0.0099]
-        error = [0.5, 0.25, 0.2, 0.05, 0.02, 0.01]
-        for i in range(len(value1)):
-            ret = audio_quality_measurement.error(value1[i], value2[i])
-            self.assertTrue(abs(ret - error[i]) < 0.001)
-
-
-class QualityMeasurementTest(unittest.TestCase):
-    def setUp(self):
-        """Creates a test signal of sine wave."""
-        numpy.random.seed(0)
-
-        self.rate = 48000
-        self.freq = 440
-        self.amplitude = 1
-        length_in_secs = 2
-        self.samples = length_in_secs * self.rate
-        self.y = []
-        for index in range(self.samples):
-            phase = 2.0 * math.pi * self.freq * float(index) / float(self.rate)
-            sine_wave = math.sin(phase)
-            self.y.append(float(self.amplitude) * sine_wave)
-
-    def add_noise(self):
-        """Adds noise to the test signal."""
-        noise_amplitude = 0.01 * self.amplitude
-        for index in range(self.samples):
-            noise = noise_amplitude * numpy.random.standard_normal()
-            self.y[index] += noise
-
-    def generate_delay(self):
-        """Generates some delays during playing."""
-        self.delay_start_time = [0.200, 0.375, 0.513, 0.814, 1.000, 1.300]
-        self.delay_end_time = [0.201, 0.377, 0.516, 0.824, 1.100, 1.600]
-
-        for i in range(len(self.delay_start_time)):
-            start_index = int(self.delay_start_time[i] * self.rate)
-            end_index = int(self.delay_end_time[i] * self.rate)
-            for j in range(start_index, end_index):
-                self.y[j] = 0
-
-    def generate_artifacts_before_playback(self):
-        """Generates artifacts before playing."""
-        silence_before_playback_end_time = 0.2
-        end_index = int(silence_before_playback_end_time * self.rate)
-        for i in range(0, end_index):
-            self.y[i] = 0
-        noise_start_index = int(0.1 * self.rate)
-        noise_end_index = int(0.1005 * self.rate)
-        for i in range(noise_start_index, noise_end_index):
-            self.y[i] = 3 * self.amplitude
-
-    def generate_artifacts_after_playback(self):
-        """Generates artifacts after playing."""
-        silence_after_playback_start_time = int(1.9 * self.rate)
-        noise_start_index = int(1.95 * self.rate)
-        noise_end_index = int((1.95 + 0.02) * self.rate)
-
-        for i in range(silence_after_playback_start_time, self.samples):
-            self.y[i] = 0
-        for i in range(noise_start_index, noise_end_index):
-            self.y[i] = self.amplitude
-
-    def generate_burst_during_playback(self):
-        """Generates bursts during playing."""
-        self.burst_start_time = [0.300, 0.475, 0.613, 0.814, 1.300]
-        self.burst_end_time = [0.301, 0.476, 0.614, 0.815, 1.301]
-
-        for i in range(len(self.burst_start_time)):
-            start_index = int(self.burst_start_time[i] * self.rate)
-            end_index = int(self.burst_end_time[i] * self.rate)
-            for j in range(start_index, end_index):
-                self.y[j] = self.amplitude * (3 + numpy.random.uniform(-1, 1))
-
-    def generate_volume_changing(self):
-        """Generates volume changing during playing."""
-        start_time = [0.300, 1.400]
-        end_time = [0.600, 1.700]
-        for i in range(len(start_time)):
-            start_index = int(start_time[i] * self.rate)
-            end_index = int(end_time[i] * self.rate)
-            for j in range(start_index, end_index):
-                self.y[j] *= 1.4
-        self.volume_changing = [+1, -1, +1, -1]
-        self.volume_changing_time = [0.3, 0.6, 1.4, 1.7]
-
-    def test_good_signal(self):
-        """Sine wave signal with no noise or artifacts."""
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-    def test_good_signal_with_noise(self):
-        """Sine wave signal with noise."""
-        self.add_noise()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(0.009 < result['equivalent_noise_level'] < 0.011)
-
-    def test_delay(self):
-        """Sine wave with delay during playing."""
-        self.generate_delay()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(
-            len(result['volume_changes']) == 2 * len(self.delay_start_time))
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-        self.assertTrue(
-            len(result['artifacts']['delay_during_playback']) ==
-            len(self.delay_start_time))
-        for i in range(len(result['artifacts']['delay_during_playback'])):
-            delta = abs(result['artifacts']['delay_during_playback'][i][0] -
-                        self.delay_start_time[i])
-            self.assertTrue(delta < 0.001)
-            duration = self.delay_end_time[i] - self.delay_start_time[i]
-            delta = abs(result['artifacts']['delay_during_playback'][i][1] -
-                        duration)
-            self.assertTrue(delta < 0.001)
-
-    def test_artifacts_before_playback(self):
-        """Sine wave with artifacts before playback."""
-        self.generate_artifacts_before_playback()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 1)
-        delta = abs(result['artifacts']['noise_before_playback'][0][0] - 0.1)
-        self.assertTrue(delta < 0.01)
-        delta = abs(result['artifacts']['noise_before_playback'][0][1] - 0.005)
-        self.assertTrue(delta < 0.004)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-    def test_artifacts_after_playback(self):
-        """Sine wave with artifacts after playback."""
-        self.generate_artifacts_after_playback()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 1)
-        delta = abs(result['artifacts']['noise_after_playback'][0][0] - 1.95)
-        self.assertTrue(delta < 0.01)
-        delta = abs(result['artifacts']['noise_after_playback'][0][1] - 0.02)
-        self.assertTrue(delta < 0.001)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-    def test_burst_during_playback(self):
-        """Sine wave with burst during playback."""
-        self.generate_burst_during_playback()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 5)
-        self.assertTrue(len(result['volume_changes']) == 10)
-        self.assertTrue(result['equivalent_noise_level'] > 0.02)
-        for i in range(len(result['artifacts']['burst_during_playback'])):
-            delta = abs(self.burst_start_time[i] - result['artifacts'][
-                'burst_during_playback'][i])
-            self.assertTrue(delta < 0.002)
-
-    def test_volume_changing(self):
-        """Sine wave with volume changing during playback."""
-        self.generate_volume_changing()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-        self.assertTrue(
-            len(result['volume_changes']) == len(self.volume_changing))
-        for i in range(len(self.volume_changing)):
-            self.assertTrue(
-                abs(self.volume_changing_time[i] - result['volume_changes'][i][
-                    0]) < 0.01)
-            self.assertTrue(
-                self.volume_changing[i] == result['volume_changes'][i][1])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/instrumentation/__init__.py b/src/antlion/test_utils_tests/instrumentation/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/instrumentation/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/instrumentation/device/__init__.py b/src/antlion/test_utils_tests/instrumentation/device/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/instrumentation/device/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/instrumentation/device/command/__init__.py b/src/antlion/test_utils_tests/instrumentation/device/command/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/instrumentation/device/command/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/instrumentation/device/command/instrumentation_command_builder_test.py b/src/antlion/test_utils_tests/instrumentation/device/command/instrumentation_command_builder_test.py
deleted file mode 100755
index 6afae91..0000000
--- a/src/antlion/test_utils_tests/instrumentation/device/command/instrumentation_command_builder_test.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.test_utils.instrumentation.device.command.instrumentation_command_builder import InstrumentationCommandBuilder
-from antlion.test_utils.instrumentation.device.command.instrumentation_command_builder import InstrumentationTestCommandBuilder
-
-
-class InstrumentationCommandBuilderTest(unittest.TestCase):
-
-    def test__runner_and_manifest_package_definition(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_manifest_package('package')
-        builder.set_runner('runner')
-        call = builder.build()
-        self.assertIn('package/runner', call)
-
-    def test__manifest_package_must_be_defined(self):
-        builder = InstrumentationCommandBuilder()
-
-        with self.assertRaisesRegex(Exception, '.*package cannot be none.*'):
-            builder.build()
-
-    def test__runner_must_be_defined(self):
-        builder = InstrumentationCommandBuilder()
-
-        with self.assertRaisesRegex(Exception, '.*runner cannot be none.*'):
-            builder.build()
-
-    def test__output_as_proto(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_proto_path()
-
-        call = builder.build()
-        self.assertIn('-f', call)
-
-    def test__proto_flag_with_set_proto_path(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_proto_path('/some/proto/path')
-
-        call = builder.build()
-        self.assertIn('-f', call)
-        self.assertIn('/some/proto/path', call)
-
-    def test__set_output_as_text_clears_proto_options(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_proto_path('/some/proto/path')
-        builder.set_output_as_text()
-
-        call = builder.build()
-        self.assertNotIn('-f', call)
-        self.assertNotIn('/some/proto/path', call)
-
-    def test__set_nohup(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_nohup()
-
-        call = builder.build()
-        self.assertEqual(
-            call, 'nohup am instrument some.manifest.package/runner >> '
-                  '$EXTERNAL_STORAGE/instrumentation_output.txt 2>&1')
-
-    def test__key_value_param_definition(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-
-        builder.add_key_value_param('my_key_1', 'my_value_1')
-        builder.add_key_value_param('my_key_2', 'my_value_2')
-
-        call = builder.build()
-        self.assertIn('-e my_key_1 my_value_1', call)
-        self.assertIn('-e my_key_2 my_value_2', call)
-
-    def test__flags(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-
-        builder.add_flag('--flag1')
-        builder.add_flag('--flag2')
-
-        call = builder.build()
-        self.assertIn('--flag1', call)
-        self.assertIn('--flag2', call)
-
-    def test__remove_flags(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-
-        builder.add_flag('--flag1')
-        builder.add_flag('--flag2')
-        builder.remove_flag('--flag1')
-
-        call = builder.build()
-        self.assertNotIn('--flag1', call)
-        self.assertIn('--flag2', call)
-
-
-class InstrumentationTestCommandBuilderTest(unittest.TestCase):
-    """Test class for
-    acts_contrib/test_utils/instrumentation/instrumentation_call_builder.py
-    """
-
-    def test__test_packages_can_not_be_added_if_classes_were_added_first(self):
-        builder = InstrumentationTestCommandBuilder()
-        builder.add_test_class('some.tests.Class')
-
-        with self.assertRaisesRegex(Exception, '.*only a list of classes.*'):
-            builder.add_test_package('some.tests.package')
-
-    def test__test_classes_can_not_be_added_if_packages_were_added_first(self):
-        builder = InstrumentationTestCommandBuilder()
-        builder.add_test_package('some.tests.package')
-
-        with self.assertRaisesRegex(Exception, '.*only a list of classes.*'):
-            builder.add_test_class('some.tests.Class')
-
-    def test__test_classes_and_test_methods_can_be_combined(self):
-        builder = InstrumentationTestCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.add_test_class('some.tests.Class1')
-        builder.add_test_method('some.tests.Class2', 'favoriteTestMethod')
-
-        call = builder.build()
-        self.assertIn('some.tests.Class1', call)
-        self.assertIn('some.tests.Class2', call)
-        self.assertIn('favoriteTestMethod', call)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/__init__.py b/src/antlion/test_utils_tests/power/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/power/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/power/tel/__init__.py b/src/antlion/test_utils_tests/power/tel/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/power/tel/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/power/tel/lab/consume_parameter_test.py b/src/antlion/test_utils_tests/power/tel/lab/consume_parameter_test.py
deleted file mode 100644
index ca26e53..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/consume_parameter_test.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mobly.config_parser as mobly_config_parser
-
-from unittest import mock
-
-
-class ConsumeParameterTest(unittest.TestCase):
-    """ Unit tests for testing the consumption of test name parameters
-      for instances of PowerCellularLabBaseTest
-    """
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_consume_parameter_typical_case(self):
-        """ Tests the typical case: The parameter is available
-            for consumption and it has enough values
-        """
-        parameters = ['param1', 1, 2, 'param2', 3, 'param3', 'value']
-        expected = ['param2', 3]
-        self.test.unpack_userparams(parameters=parameters)
-        try:
-            result = self.test.consume_parameter('param2', 1)
-            self.assertTrue(
-                result == expected,
-                'Consume parameter did not return the expected result')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_consume_parameter_returns_empty_when_parameter_unavailabe(self):
-        """ Tests the case where the requested parameter is unavailable
-            for consumption. In this case, a ValueError should be raised
-        """
-        parameters = ['param1', 1, 2]
-        expected = []
-        self.test.unpack_userparams(parameters=parameters)
-        try:
-            result = self.test.consume_parameter('param2', 1)
-            self.assertTrue(
-                result == expected,
-                'Consume parameter should return empty list for an invalid key'
-            )
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_consume_parameter_throws_when_requesting_too_many_parameters(
-            self):
-        """ Tests the case where the requested parameter is available
-            for consumption, but too many values are requested
-        """
-        parameters = ['param1', 1, 2]
-        self.test.unpack_userparams(parameters=parameters)
-        with self.assertRaises(ValueError):
-            self.test.consume_parameter('param1', 3)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/ensure_valid_calibration_table_test.py b/src/antlion/test_utils_tests/power/tel/lab/ensure_valid_calibration_table_test.py
deleted file mode 100644
index 1aa9361..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/ensure_valid_calibration_table_test.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import mobly.config_parser as mobly_config_parser
-
-
-class EnsureValidCalibrationTableTest(unittest.TestCase):
-    """ Unit tests for exercising the logic of ensure_valid_calibration_table
-        for instances of PowerCellularLabBaseTest
-    """
-
-    VALID_CALIBRATION_TABLE = {'1': {'2': {'3': 123, '4': 3.14}}, '2': 45.67}
-
-    INVALID_CALIBRATION_TABLE = invalid = {'1': {'a': 'invalid'}, '2': 1234}
-
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-
-    def _assert_no_exception(self, func, *args, **kwargs):
-        try:
-            func(*args, **kwargs)
-        except Exception as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def _assert_calibration_table_passes(self, table):
-        self._assert_no_exception(self.test.ensure_valid_calibration_table, table)
-
-    def _assert_calibration_table_fails(self, table):
-        with self.assertRaises(TypeError):
-            self.test.ensure_valid_calibration_table(table)
-
-    def test_ensure_valid_calibration_table_passes_with_empty_table(self):
-        """ Ensure that empty calibration tables are invalid """
-        self._assert_calibration_table_passes({})
-
-    def test_ensure_valid_calibration_table_passes_with_valid_table(self):
-        """ Ensure that valid calibration tables throw no error """
-        self._assert_calibration_table_passes(self.VALID_CALIBRATION_TABLE)
-
-    def test_ensure_valid_calibration_table_fails_with_invalid_data(self):
-        """ Ensure that calibration tables with invalid entries throw an error """
-        self._assert_calibration_table_fails(self.INVALID_CALIBRATION_TABLE)
-
-    def test_ensure_valid_calibration_table_fails_with_none(self):
-        """ Ensure an exception is thrown if no calibration table is given """
-        self._assert_calibration_table_fails(None)
-
-    def test_ensure_valid_calibration_table_fails_with_invalid_type(self):
-        """ Ensure an exception is thrown if no calibration table is given """
-        self._assert_calibration_table_fails([])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/init_simulation_test.py b/src/antlion/test_utils_tests/power/tel/lab/init_simulation_test.py
deleted file mode 100644
index c7c4285..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/init_simulation_test.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-from antlion.controllers.cellular_lib.UmtsSimulation import UmtsSimulation
-
-
-class InitSimulationTest(unittest.TestCase):
-    """ Unit tests for ensuring the simulation is correctly
-        initialized for instances of PowerCellularLabBaseTest
-    """
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_init_simulation_reuses_simulation_if_same_type(self):
-        """ Ensure that a new simulation is not instantiated if
-            the type is the same as the last simulation
-        """
-        mock_lte_sim = mock.Mock(spec=LteSimulation)
-        self.test.unpack_userparams(simulation=mock_lte_sim)
-        try:
-            self.test.init_simulation(self.PCBT.PARAM_SIM_TYPE_LTE)
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-        self.assertTrue(self.test.simulation is mock_lte_sim,
-                        'A new simulation was instantiated')
-
-    def test_init_simulation_does_not_reuse_simulation_if_different_type(self):
-        """ Ensure that a new simulation is instantiated if
-            the type is different from the last simulation
-        """
-        self.test.unpack_userparams(simulation=mock.Mock(spec=LteSimulation),
-                               test_params=mock.Mock())
-        try:
-            with mock.patch.object(UmtsSimulation,
-                                   '__init__',
-                                   return_value=None) as mock_init:
-                self.test.init_simulation(self.PCBT.PARAM_SIM_TYPE_UMTS)
-        except Exception as e:
-            self.fail('Error thrown: {}'.format(e))
-        self.assertTrue(mock_init.called,
-                        'A new simulation was not instantiated')
-
-    def test_init_simulation_throws_error_with_invalid_simulation_type(self):
-        """ Ensure that a new simulation is not instantiated if
-            the type is invalid
-        """
-        self.test.unpack_userparams(simulation=mock.Mock(spec=LteSimulation))
-        with self.assertRaises(ValueError):
-            self.test.init_simulation('Invalid simulation type')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/initialize_simulator_test.py b/src/antlion/test_utils_tests/power/tel/lab/initialize_simulator_test.py
deleted file mode 100644
index cd23f4b..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/initialize_simulator_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsu
-from antlion.controllers.rohdeschwarz_lib import cmw500_cellular_simulator as cmw
-
-
-class InitializeSimulatorTest(unittest.TestCase):
-    """ Unit tests for ensuring the simulator is correctly
-        initialized for instances of PowerCellularLabBaseTest
-    """
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_initialize_simulator_md8475_A(self):
-        """ Ensure that an instance of MD8475CellularSimulator
-            is returned when requesting md8475_version A
-        """
-        self.test.unpack_userparams(md8475_version='A', md8475a_ip_address='12345')
-        try:
-            with mock.patch.object(anritsu.MD8475CellularSimulator,
-                                   '__init__',
-                                   return_value=None):
-                result = self.test.initialize_simulator()
-                self.assertTrue(
-                    isinstance(result, anritsu.MD8475CellularSimulator),
-                    'Incorrect simulator type returned for md8475_version A')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_initialize_simulator_md8475_B(self):
-        """ Ensure that an instance of MD8475BCellularSimulator
-            is returned when requesting md8475_version B
-        """
-        self.test.unpack_userparams(md8475_version='B', md8475a_ip_address='12345')
-        try:
-            with mock.patch.object(anritsu.MD8475BCellularSimulator,
-                                   '__init__',
-                                   return_value=None):
-                result = self.test.initialize_simulator()
-                self.assertTrue(
-                    isinstance(result, anritsu.MD8475BCellularSimulator),
-                    'Incorrect simulator type returned for md8475_version B')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_initialize_simulator_cmw500(self):
-        """ Ensure that an instance of CMW500CellularSimulator
-            is returned when requesting cmw500
-        """
-        self.test.unpack_userparams(md8475_version=None,
-                               md8475a_ip_address=None,
-                               cmw500_ip='12345',
-                               cmw500_port='12345')
-        try:
-            with mock.patch.object(cmw.CMW500CellularSimulator,
-                                   '__init__',
-                                   return_value=None):
-                result = self.test.initialize_simulator()
-                self.assertTrue(
-                    isinstance(result, cmw.CMW500CellularSimulator),
-                    'Incorrect simulator type returned for cmw500')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_initialize_simulator_throws_with_missing_configs(self):
-        """ Ensure that an error is raised when initialize_simulator
-            is called with missing configs
-        """
-        self.test.unpack_userparams(md8475_version=None,
-                               md8475a_ip_address=None,
-                               cmw500_ip='12345',
-                               cmw500_port=None)
-        with self.assertRaises(RuntimeError), mock.patch.object(
-                cmw.CMW500CellularSimulator, '__init__', return_value=None):
-            self.test.initialize_simulator()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/power_tel_traffic_e2e_test.py b/src/antlion/test_utils_tests/power/tel/lab/power_tel_traffic_e2e_test.py
deleted file mode 100644
index be71fee..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/power_tel_traffic_e2e_test.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import antlion.test_utils.power.cellular.cellular_traffic_power_test as ctpt
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-from antlion.controllers.rohdeschwarz_lib import cmw500_cellular_simulator as cmw
-
-magic_patch = lambda patched: mock.patch(patched, mock.MagicMock())
-
-
-class PowerTelTrafficE2eTest(unittest.TestCase):
-    """ E2E sanity test for the power cellular traffic tests """
-    @classmethod
-    def setUpClass(cls):
-        cls.PTTT = ctpt.PowerTelTrafficTest
-        cls.PTTT.log = mock.Mock()
-        cls.PTTT.log_path = ''
-
-    @magic_patch('json.load')
-    @magic_patch('builtins.open')
-    @magic_patch('os.chmod')
-    @magic_patch('os.system')
-    @magic_patch('time.sleep')
-    @magic_patch(
-        'acts_contrib.test_utils.power.cellular.cellular_power_base_test.telutils')
-    @magic_patch('acts_contrib.test_utils.power.PowerBaseTest.wutils')
-    @magic_patch(
-        'acts.metrics.loggers.blackbox.BlackboxMetricLogger.for_test_case')
-    @magic_patch(
-        'acts_contrib.test_utils.power.loggers.power_metric_logger.PowerMetricLogger.for_test_case'
-    )
-    def test_e2e(self, *args):
-
-        # Configure the test
-        test_to_mock = 'test_lte_traffic_direction_dlul_blimit_0_0'
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PTTT(test_run_config)
-        mock_android = mock.Mock()
-        mock_android.model = 'coral'
-        test.unpack_userparams(
-            android_devices=[mock_android],
-            monsoons=[mock.Mock()],
-            iperf_servers=[mock.Mock(), mock.Mock()],
-            packet_senders=[mock.Mock(), mock.Mock()],
-            custom_files=[
-                'pass_fail_threshold_coral.json', 'rockbottom_coral.sh'
-            ],
-            simulation=mock.Mock(spec=LteSimulation),
-            mon_freq=5000,
-            mon_duration=0,
-            mon_offset=0,
-            current_test_name=test_to_mock,
-            test_name=test_to_mock,
-            test_result=mock.Mock(),
-            bug_report={},
-            dut_rockbottom=mock.Mock(),
-            start_tel_traffic=mock.Mock(),
-            init_simulation=mock.Mock(),
-            initialize_simulator=mock.Mock(return_value=mock.Mock(
-                spec=cmw.CMW500CellularSimulator)),
-            collect_power_data=mock.Mock(),
-            get_iperf_results=mock.Mock(return_value={
-                'ul': 0,
-                'dl': 0
-            }),
-            pass_fail_check=mock.Mock())
-
-        # Emulate lifecycle
-        test.setup_class()
-        test.setup_test()
-        test.power_tel_traffic_test()
-        test.teardown_test()
-        test.teardown_class()
-
-        self.assertTrue(test.start_tel_traffic.called,
-                        'Start traffic was not called')
-        self.assertTrue(test.init_simulation.called,
-                        'Simulation was not initialized')
-        self.assertTrue(test.initialize_simulator.called,
-                        'Simulator was not initialized')
-        self.assertTrue(test.collect_power_data.called,
-                        'Power data was not collected')
-        self.assertTrue(test.get_iperf_results.called,
-                        'Did not get iperf results')
-        self.assertTrue(test.pass_fail_check.called,
-                        'Pass/Fail check was not performed')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/save_summary_to_file_test.py b/src/antlion/test_utils_tests/power/tel/lab/save_summary_to_file_test.py
deleted file mode 100644
index 556cfdb..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/save_summary_to_file_test.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-from unittest.mock import mock_open
-
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-
-
-class SaveSummaryToFileTest(unittest.TestCase):
-    """ Unit tests for testing the save summary functionality for
-        instances of PowerCellularLabBaseTest
-    """
-
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_save_summary_to_file(self):
-        """ Ensure that a new file is written when saving
-            the test summary
-        """
-        self.test.unpack_userparams(simulation=mock.Mock(spec=LteSimulation))
-        m = mock_open()
-        with mock.patch('builtins.open', m, create=False):
-            self.test.save_summary_to_file()
-        self.assertTrue(m.called, 'Test summary was not written to output')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/tests/BUILD.gn b/src/antlion/tests/BUILD.gn
new file mode 100644
index 0000000..e0d98ba
--- /dev/null
+++ b/src/antlion/tests/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_host, "antlion tests only supported for host testing")
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    "dhcp:e2e_tests",
+    "examples:e2e_tests",
+    "wlan:e2e_tests",
+    "wlan_policy:e2e_tests",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    "examples:e2e_tests_quick",
+    "wlan:e2e_tests_quick",
+    "wlan_policy:e2e_tests",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    "wlan:e2e_tests_manual",
+  ]
+}
diff --git a/src/antlion/tests/bt/BleFuchsiaAndroidTest.py b/src/antlion/tests/bt/BleFuchsiaAndroidTest.py
deleted file mode 100644
index 982c3ac..0000000
--- a/src/antlion/tests/bt/BleFuchsiaAndroidTest.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This script shows simple examples of how to get started with Bluetooth
-   Low Energy testing in acts.
-"""
-
-import pprint
-
-from antlion.controllers import android_device
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_modes
-from antlion.test_utils.bt.bt_constants import adv_succ
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_test_utils import cleanup_scanners_and_advertisers
-from antlion.test_utils.bt.bt_test_utils import reset_bluetooth
-
-
-class BleFuchsiaAndroidTest(BluetoothBaseTest):
-    default_timeout = 10
-    active_adv_callback_list = []
-    droid = None
-
-    def setup_class(self):
-        super().setup_class()
-
-        # Android device under test
-        self.ad = self.android_devices[0]
-        # Fuchsia device under test
-        self.fd = self.fuchsia_devices[0]
-        self.log.info("There are: {} fuchsia and {} android devices.".format(
-            len(self.fuchsia_devices), len(self.android_devices)))
-
-    def _start_generic_advertisement_include_device_name(self):
-        self.ad.droid.bleSetAdvertiseDataIncludeDeviceName(True)
-        self.ad.droid.bleSetAdvertiseSettingsAdvertiseMode(
-            ble_advertise_settings_modes['low_latency'])
-        advertise_data = self.ad.droid.bleBuildAdvertiseData()
-        advertise_settings = self.ad.droid.bleBuildAdvertiseSettings()
-        advertise_callback = self.ad.droid.bleGenBleAdvertiseCallback()
-        self.ad.droid.bleStartBleAdvertising(advertise_callback,
-                                             advertise_data,
-                                             advertise_settings)
-        self.ad.ed.pop_event(adv_succ.format(advertise_callback),
-                             self.default_timeout)
-        self.active_adv_callback_list.append(advertise_callback)
-        return advertise_callback
-
-    # Basic test for android device as advertiser and fuchsia device as scanner
-    # Returns True if scan result has an entry corresponding to sample_android_name
-    @BluetoothBaseTest.bt_test_wrap
-    def test_fuchsia_scan_android_adv(self):
-        sample_android_name = "Pixel1234"
-        self.ad.droid.bluetoothSetLocalName(sample_android_name)
-        adv_callback = self._start_generic_advertisement_include_device_name()
-        droid_name = self.ad.droid.bluetoothGetLocalName()
-        self.log.info("Android device name: {}".format(droid_name))
-        res = True
-        if not le_scan_for_device_by_name(
-                self.fd, self.log, sample_android_name, self.default_timeout):
-            res = False
-
-        #Stop android advertising
-        self.ad.droid.bleStopBleAdvertising(adv_callback)
-
-        return res
-
-    # Test for fuchsia device attempting to connect to android device (peripheral)
-    # Also tests list_services and disconnect on a peripheral
-    @BluetoothBaseTest.bt_test_wrap
-    def test_fuchsia_connect_android_periph(self):
-        sample_android_name = "Pixel1234"
-        self.ad.droid.bluetoothStartPairingHelper()
-        self.ad.droid.bluetoothSetLocalName(sample_android_name)
-        adv_callback = self._start_generic_advertisement_include_device_name()
-        droid_name = self.ad.droid.bluetoothGetLocalName()
-        self.log.info("Android device name: {}".format(droid_name))
-
-        scan_result = le_scan_for_device_by_name(self.fd, self.log,
-                                                 sample_android_name,
-                                                 self.default_timeout)
-        if not scan_result:
-            return False
-
-        name, did, connectable = scan_result["name"], scan_result[
-            "id"], scan_result["connectable"]
-
-        connect = self.fd.sl4f.gattc_lib.bleConnectToPeripheral(did)
-        self.log.info("Connecting returned status: {}".format(connect))
-
-        services = self.fd.sl4f.gattc_lib.listServices(did)
-        self.log.info("Listing services returned: {}".format(services))
-
-        dconnect = self.fd.sl4f.gattc_lib.bleDisconnectPeripheral(did)
-        self.log.info("Disconnect status: {}".format(dconnect))
-
-        #Print clients to validate results are saved
-        self.fd.sl4f.print_clients()
-
-        #Stop android advertising + cleanup sl4f
-        self.ad.droid.bleStopBleAdvertising(adv_callback)
-
-        return True
-
-    # Currently, this test doesn't work. The android device does not scan
-    # TODO(): Debug android scan
-    @BluetoothBaseTest.bt_test_wrap
-    def test_fuchsia_adv_android_scan(self):
-        #Initialize advertising on fuchsia device with name and interval
-        fuchsia_name = "testADV123"
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-
-        #Start advertising
-        self.fd.sl4f.ble_lib.bleStartBleAdvertising(adv_data, scan_response,
-                                                    interval, connectable)
-
-        # Initialize scan on android device with scan settings + callback
-        filter_list = self.ad.droid.bleGenFilterList()
-        self.ad.droid.bleSetScanFilterDeviceName(fuchsia_name)
-        self.ad.droid.bleSetScanSettingsScanMode(
-            ble_scan_settings_modes['low_latency'])
-        scan_settings = self.ad.droid.bleBuildScanSetting()
-        scan_callback = self.ad.droid.bleGenScanCallback()
-        self.ad.droid.bleBuildScanFilter(filter_list)
-        self.ad.droid.bleStartBleScan(filter_list, scan_settings,
-                                      scan_callback)
-        event_name = scan_result.format(scan_callback)
-        try:
-            event = self.ad.ed.pop_event(event_name, self.default_timeout)
-            self.log.info("Found scan result: {}".format(
-                pprint.pformat(event)))
-        except Exception:
-            self.log.error("Didn't find any scan results.")
-            return False
-        finally:
-            self.fd.sl4f.ble_lib.bleStopBleAdvertising()
-            self.ad.droid.bleStopBleScan(scan_callback)
-        # TODO(): Validate result
-        return True
diff --git a/src/antlion/tests/bt/BleFuchsiaTest.py b/src/antlion/tests/bt/BleFuchsiaTest.py
deleted file mode 100644
index 6730924..0000000
--- a/src/antlion/tests/bt/BleFuchsiaTest.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This script tests various BLE APIs for Fuchsia devices.
-"""
-
-import random
-
-from antlion.base_test import BaseTestClass
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-
-class BleFuchsiaTest(BaseTestClass):
-    default_timeout = 10
-    active_scan_callback_list = []
-    active_adv_callback_list = []
-    droid = None
-
-    def setup_class(self):
-        super().setup_class()
-
-        if (len(self.fuchsia_devices) < 2):
-            self.log.error("BleFuchsiaTest Init: Not enough fuchsia devices.")
-        self.log.info("Running testbed setup with two fuchsia devices")
-        self.fuchsia_adv = self.fuchsia_devices[0]
-        self.fuchsia_scan = self.fuchsia_devices[1]
-
-    def test_fuchsia_publish_service(self):
-        service_primary = True
-        # Random uuid
-        service_type = "0000180f-0000-1000-8000-00805fffffff"
-
-        # Generate a random key for sl4f storage of proxy key
-        service_proxy_key = "SProxy" + str(random.randint(0, 1000000))
-        res = self.fuchsia_adv.sl4f.ble_lib.blePublishService(
-            service_primary, service_type, service_proxy_key)
-        self.log.info("Publish result: {}".format(res))
-
-        return True
-
-    def test_fuchsia_scan_fuchsia_adv(self):
-        # Initialize advertising on Fuchsia device with name and interval
-        fuchsia_name = "testADV1234"
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-        res = True
-
-        # Start advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, interval, connectable)
-        self.log.info("Fuchsia advertising name: {}".format(fuchsia_name))
-
-        # Start scan
-        scan_result = le_scan_for_device_by_name(self.fuchsia_scan, self.log,
-                                                 fuchsia_name,
-                                                 self.default_timeout)
-        if not scan_result:
-            res = False
-
-        # Stop advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStopBleAdvertising()
-
-        return res
-
-    def test_fuchsia_gatt_fuchsia_periph(self):
-        # Create random service with primary, and uuid
-        service_primary = True
-        # Random uuid
-        service_type = "0000180f-0000-1000-8000-00805fffffff"
-
-        # Generate a random key for sl4f storage of proxy key
-        service_proxy_key = "SProxy" + str(random.randint(0, 1000000))
-        res = self.fuchsia_adv.sl4f.ble_lib.blePublishService(
-            service_primary, service_type, service_proxy_key)
-        self.log.info("Publish result: {}".format(res))
-
-        # Initialize advertising on Fuchsia device with name and interval
-        fuchsia_name = "testADV1234"
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-
-        # Start advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, interval, connectable)
-        self.log.info("Fuchsia advertising name: {}".format(fuchsia_name))
-
-        # Start Scan
-        scan_result = le_scan_for_device_by_name(self.fuchsia_scan, self.log,
-                                                 fuchsia_name,
-                                                 self.default_timeout)
-        if not scan_result:
-            self.fuchsia_adv.sl4f.ble_lib.bleStopBleAdvertising()
-            return False
-
-        name, did, connectable = scan_result["name"], scan_result[
-            "id"], scan_result["connectable"]
-
-        connect = self.fuchsia_scan.sl4f.gattc_lib.bleConnectToPeripheral(did)
-        self.log.info("Connecting returned status: {}".format(connect))
-
-        services = self.fuchsia_scan.sl4f.gattc_lib.listServices(did)
-        self.log.info("Listing services returned: {}".format(services))
-
-        dconnect = self.fuchsia_scan.sl4f.gattc_lib.bleDisconnectPeripheral(
-            did)
-        self.log.info("Disconnect status: {}".format(dconnect))
-
-        # Stop fuchsia advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStopBleAdvertising()
-
-        return True
diff --git a/src/antlion/tests/bt/BluetoothCmdLineTest.py b/src/antlion/tests/bt/BluetoothCmdLineTest.py
deleted file mode 100644
index 588d095..0000000
--- a/src/antlion/tests/bt/BluetoothCmdLineTest.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for initializing a cmd line tool for PTS and other purposes.
-Required custom config parameters:
-'target_mac_address': '00:00:00:00:00:00'
-
-"""
-from antlion.base_test import BaseTestClass
-from command_input import CommandInput
-
-
-class BluetoothCmdLineTest(BaseTestClass):
-    target_device_name = ""
-
-    def setup_class(self):
-        super().setup_class()
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = self.fuchsia_devices[0]
-                self.dut.sl4f.bts_lib.initBluetoothSys()
-                self.dut.sl4f.sdp_lib.init()
-            elif dut == 'android_devices':
-                self.dut = self.android_devices[0]
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is a Fuchsia device
-            self.dut = self.fuchsia_devices[0]
-        if not "target_device_name" in self.user_params.keys():
-            self.log.warning("Missing user config \"target_device_name\"!")
-            self.target_device_name = ""
-        else:
-            self.target_device_name = self.user_params["target_device_name"]
-
-    def test_cmd_line_helper(self):
-        cmd_line = CommandInput()
-        cmd_line.setup_vars(self.dut, self.target_device_name, self.log)
-        cmd_line.cmdloop()
-        return True
diff --git a/src/antlion/tests/bt/FuchsiaBtMacAddressTest.py b/src/antlion/tests/bt/FuchsiaBtMacAddressTest.py
deleted file mode 100644
index 67ee6dd..0000000
--- a/src/antlion/tests/bt/FuchsiaBtMacAddressTest.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is a test to verify two or more Fuchsia devices don't have the same mac
-address.
-
-Setup:
-This test requires at least two fuchsia devices.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-
-
-class FuchsiaBtMacAddressTest(BaseTestClass):
-    scan_timeout_seconds = 10
-
-    def setup_class(self):
-        super().setup_class()
-
-        if len(self.fuchsia_devices) < 2:
-            raise signals.TestAbortAll("Need at least two Fuchsia devices")
-        for device in self.fuchsia_devices:
-            device.sl4f.bts_lib.initBluetoothSys()
-
-    def test_verify_different_mac_addresses(self):
-        """Verify that all connected Fuchsia devices have unique mac addresses.
-
-        Steps:
-        1. Get mac address from each device
-
-        Expected Result:
-        Verify duplicate mac addresses don't exist.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BR/EDR, BT
-        Priority: 1
-        """
-        mac_addr_list = []
-        for device in self.fuchsia_devices:
-            mac_addr_list.append(
-                device.sl4f.bts_lib.getActiveAdapterAddress().get("result"))
-        if len(mac_addr_list) != len(set(mac_addr_list)):
-            raise signals.TestFailure(
-                "Found duplicate mac addresses {}.".format(mac_addr_list))
-        raise signals.TestPass(
-            "Success: All Bluetooth MAC addresses are unique: {}".format(
-                mac_addr_list))
diff --git a/src/antlion/tests/bt/FuchsiaBtScanTest.py b/src/antlion/tests/bt/FuchsiaBtScanTest.py
deleted file mode 100644
index fd659a0..0000000
--- a/src/antlion/tests/bt/FuchsiaBtScanTest.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This test exercises Bluetooth scanning between Fuchsia devices.
-
-Setup:
-This test requires two Fuchsia devices: one to perform discovery and one to
-act as the discoverable peer.
-"""
-
-import time
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-
-
-class FuchsiaBtScanTest(BaseTestClass):
-    scan_timeout_seconds = 30
-
-    def setup_class(self):
-        super().setup_class()
-        self.pri_dut = self.fuchsia_devices[0]
-        self.sec_dut = self.fuchsia_devices[1]
-
-        self.pri_dut.sl4f.bts_lib.initBluetoothSys()
-        self.sec_dut.sl4f.bts_lib.initBluetoothSys()
-
-    def test_scan_with_peer_set_non_discoverable(self):
-        """Test Bluetooth scan with peer set to non discoverable.
-
-        Steps:
-        1. Set peer device to a unique device name.
-        2. Set peer device to be non-discoverable.
-        3. Perform a BT Scan with primary dut with enough time to
-        gather results.
-
-        Expected Result:
-        Verify there are no results that match the unique device
-        name in step 1.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BR/EDR, BT
-        Priority: 1
-        """
-        local_name = generate_id_by_size(10)
-        self.sec_dut.sl4f.bts_lib.setName(local_name)
-        self.sec_dut.sl4f.bts_lib.setDiscoverable(False)
-
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(True)
-        time.sleep(self.scan_timeout_seconds)
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-        discovered_devices = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices()
-        for device in discovered_devices.get("result").values():
-            discoverd_name = device.get("name")
-            if discoverd_name is not None and discoverd_name == local_name:
-                raise signals.TestFailure(
-                    "Found peer unexpectedly: {}.".format(device))
-        raise signals.TestPass("Successfully didn't find peer device.")
-
-    def test_scan_with_peer_set_discoverable(self):
-        """Test Bluetooth scan with peer set to discoverable.
-
-        Steps:
-        1. Set peer device to a unique device name.
-        2. Set peer device to be discoverable.
-        3. Perform a BT Scan with primary dut with enough time to
-        gather results.
-
-        Expected Result:
-        Verify there is a result that matches the unique device
-        name in step 1.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BR/EDR, BT
-        Priority: 1
-        """
-        local_name = generate_id_by_size(10)
-        self.log.info("Setting local peer name to: {}".format(local_name))
-        self.sec_dut.sl4f.bts_lib.setName(local_name)
-        self.sec_dut.sl4f.bts_lib.setDiscoverable(True)
-
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(True)
-        end_time = time.time() + self.scan_timeout_seconds
-        poll_timeout = 10
-        while time.time() < end_time:
-            discovered_devices = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )
-            for device in discovered_devices.get("result").values():
-                self.log.info(device)
-                discoverd_name = device.get("name")
-                if discoverd_name is not None and discoverd_name in local_name:
-                    self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-                    raise signals.TestPass("Successfully found peer device.")
-            time.sleep(poll_timeout)
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-        raise signals.TestFailure("Unable to find peer device.")
diff --git a/src/antlion/tests/bt/GapSecSemTest.py b/src/antlion/tests/bt/GapSecSemTest.py
deleted file mode 100644
index 71a21c1..0000000
--- a/src/antlion/tests/bt/GapSecSemTest.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-PTS GAP/SEC/SEM Automation
-
-Optional custom parameter "collect_detailed_pass_logs"
-    Used to collect link keys and extra logs on pass results
-    to be used for certification purposes.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-from antlion.test_utils.fuchsia.bt_test_utils import get_link_keys
-from antlion.test_utils.fuchsia.bt_test_utils import unbond_all_known_devices
-from contextlib import suppress
-import inspect
-
-
-class GapSecSemTest(BaseTestClass):
-    gatt_connect_err_message = "Gatt connection failed with: {}"
-    gatt_disconnect_err_message = "Gatt disconnection failed with: {}"
-    ble_advertise_interval = 50
-    scan_timeout_seconds = 60
-
-    def setup_class(self):
-        super().setup_class()
-        self.pri_dut = self.fuchsia_devices[0]
-        # TODO: fxb/57968 Provide Facade for setting secure connections only mode,
-        # for the interim set this manually in the build.
-        self.sec_dut = self.fuchsia_devices[1]
-        for fd in self.fuchsia_devices:
-            fd.sl4f.bts_lib.initBluetoothSys()
-        # Optional user param for collecting enough information for
-        # certification on pass results.
-        self.collect_detailed_pass_logs = self.user_params.get(
-            "collect_detailed_pass_logs", False)
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            fd.take_bug_report(test_name, begin_time)
-
-    def teardown_test(self):
-        # Stop scanning and advertising on all devices at the end of a test.
-        with suppress(Exception):
-            for fd in self.fuchsia_devices:
-                fd.sl4f.ble_lib.bleStopBleAdvertising()
-                fd.bleStopBleScan()
-        for fd in self.fuchsia_devices:
-            unbond_all_known_devices(fd, self.log)
-
-    def teardown_class(self):
-        for fd in self.fuchsia_devices:
-            fd.sl4f.bts_lib.requestDiscovery(False)
-
-    def on_pass(self, test_name, begin_time):
-        if self.collect_detailed_pass_logs == True:
-            for fd in self.fuchsia_devices:
-                fd.take_bt_snoop_log(test_name)
-                fd.take_bug_report(test_name, begin_time)
-
-    def _orchestrate_gatt_connection(self, central, peripheral):
-        """ Orchestrate a GATT connection from the input Central
-        Fuchsia device to the Peripheral Fuchsia device.
-        Args:
-                central: The central role Fuchsia device
-                peripheral: The peripheral role Fuchsia device
-
-        Returns:
-            Dictionary of device info if connection successful.
-        """
-        adv_name = generate_id_by_size(10)
-        adv_data = {
-            "name": adv_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-
-        peripheral.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, self.ble_advertise_interval, connectable)
-        scan_filter = {"name_substring": adv_name}
-        central.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-        device = le_scan_for_device_by_name(central,
-                                            self.log,
-                                            adv_name,
-                                            self.scan_timeout_seconds,
-                                            partial_match=False,
-                                            self_manage_scan=False)
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        connect_result = central.sl4f.gattc_lib.bleConnectToPeripheral(
-            device["id"])
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_connect_err_message.format(
-                    connect_result.get("error")))
-        self.log.info("Connection Successful...")
-
-        return device
-
-    def _orchestrate_gap_sec_sem_37_to_44_test(self, test_name, central,
-                                               peripheral,
-                                               is_central_pairing_initiator,
-                                               security_level):
-        """ Performs GAP/SEC/SEM/BV-37 to 44 tests.
-            This set of tests deals with varying modes and directions of
-            service-level connections with LE Secure Connections.
-
-            Args:
-                test_name: The name of the test for logging purposes
-                central: The central role Fuchsia device
-                peripheral: The peripheral role Fuchsia device
-                is_central_pairing_initiator: True if Central should initiate
-                    the pairing. False if Peripheral should initiate.
-                security_level: 1 for Encrypted, 2 for Authenticated
-            Orchestration Steps:
-                1. Perform GATT connection from Central to Peripheral
-                2. Pair with specified security_level in direction specified
-                    by is_central_pairing_initiator.
-                3. Exchange pairing pins
-                4. Collect link keys if applicable
-                5. Disconnect device
-                6. Forget pairing.
-        """
-        input_capabilities = "NONE"
-        output_capabilities = "NONE"
-
-        central.sl4f.bts_lib.acceptPairing("KEYBOARD", "DISPLAY")
-
-        peripheral.sl4f.bts_lib.acceptPairing("KEYBOARD", "DISPLAY")
-
-        device = self._orchestrate_gatt_connection(central, peripheral)
-        # TODO: fxb/71289 Change once all peer IDs are ints and not strings
-        identifier = int("0x{}".format(device["id"]), 0)
-        bondable = True
-        transport = 2  #LE
-        if is_central_pairing_initiator:
-            pair_result = central.sl4f.bts_lib.pair(identifier, security_level,
-                                                    bondable, transport)
-        if not is_central_pairing_initiator:
-            device_list = peripheral.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            print(device_list)
-            for id_dict in device_list:
-                d = device_list[id_dict]
-                name = None
-                if d['connected'] is True:
-                    did = d['id']
-            pair_result = peripheral.sl4f.bts_lib.pair(did, security_level,
-                                                       bondable, transport)
-
-        pins_transferred = False
-        pairing_pin = central.sl4f.bts_lib.getPairingPin()['result']
-        if pairing_pin != "0" and pairing_pin is not None:
-            peripheral.sl4f.bts_lib.inputPairingPin(pairing_pin)
-            pins_transferred = True
-        if not pins_transferred:
-            pairing_pin = peripheral.sl4f.bts_lib.getPairingPin()['result']
-            if pairing_pin != "0":
-                central.sl4f.bts_lib.inputPairingPin(pairing_pin)
-
-        if self.collect_detailed_pass_logs == True:
-            save_path = f"{central.log_path}/{test_name}_stash_secure.store"
-            self.log.info(
-                f"Known Link Keys: {get_link_keys(central, save_path)}")
-            save_path = f"{peripheral.log_path}/{test_name}_stash_secure.store"
-            self.log.info(
-                f"Known Link Keys: {get_link_keys(peripheral, save_path)}")
-
-        disconnect_result = central.sl4f.gattc_lib.bleDisconnectPeripheral(
-            device["id"])
-        if disconnect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_disconnect_err_message.format(
-                    disconnect_result.get("error")))
-        self.log.info("Disconnection Successful...")
-
-        central.sl4f.bts_lib.forgetDevice(identifier)
-
-        raise signals.TestPass("Success")
-
-    def test_gap_sec_sem_bv_37_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = True
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_38_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = True
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_39_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = False
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_40_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = False
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_41_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = True
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_42_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = True
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_43_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = False
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_44_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = False
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
diff --git a/src/antlion/tests/bt/command_input.py b/src/antlion/tests/bt/command_input.py
deleted file mode 100644
index 46e0e3f..0000000
--- a/src/antlion/tests/bt/command_input.py
+++ /dev/null
@@ -1,3050 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python script for wrappers to various libraries.
-
-The CommandInput class inherits from the cmd library.
-
-Functions that start with "do_" have a method
-signature that doesn't match the actual command
-line command, and that is intentional. This is so the
-"help" command knows what to display (in this case
-the documentation of the command itself).
-
-For example:
-The function "do_tool_set_target_device_name"
-takes the inputs self and line, which is expected for this type
-of method signature. When the "help" command is run on the
-command name, you get the function's documentation, as shown here:
-
-(Cmd) help tool_set_target_device_name
-
-        Description: Reset the target device name.
-        Input(s):
-            device_name: Required. The advertising name to connect to.
-        Usage: tool_set_target_device_name new_target_device_name
-          Examples:
-            tool_set_target_device_name le_watch
-
-This is all to say this documentation pattern is expected.
-
-"""
-
-from antlion.test_utils.audio_analysis_lib.check_quality import quality_analysis
-from antlion.test_utils.bt.bt_constants import audio_bits_per_sample_32
-from antlion.test_utils.bt.bt_constants import audio_sample_rate_48000
-from antlion.test_utils.abstract_devices.bluetooth_device import create_bluetooth_device
-from antlion.test_utils.bt.bt_constants import bt_attribute_values
-from antlion.test_utils.bt.bt_constants import sig_appearance_constants
-from antlion.test_utils.bt.bt_constants import sig_uuid_constants
-from antlion.test_utils.fuchsia.sdp_records import sdp_pts_record_list
-
-import antlion.test_utils.bt.gatt_test_database as gatt_test_database
-
-import cmd
-import pprint
-import time
-"""Various Global Strings"""
-BASE_UUID = sig_uuid_constants['BASE_UUID']
-CMD_LOG = "CMD {} result: {}"
-FAILURE = "CMD {} threw exception: {}"
-BASIC_ADV_NAME = "fs_test"
-
-
-class CommandInput(cmd.Cmd):
-    ble_adv_interval = 1000
-    ble_adv_appearance = None
-    ble_adv_data_include_tx_power_level = False
-    ble_adv_include_name = True
-    ble_adv_include_scan_response = False
-    ble_adv_name = "fs_test"
-    ble_adv_data_manufacturer_data = None
-    ble_adv_data_service_data = None
-    ble_adv_data_service_uuid_list = None
-    ble_adv_data_uris = None
-
-    bt_control_ids = []
-    bt_control_names = []
-    bt_control_devices = []
-    bt_scan_poll_timer = 0.5
-    target_device_name = ""
-    le_ids = []
-    unique_mac_addr_id = None
-
-    def setup_vars(self, dut, target_device_name, log):
-        self.pri_dut = dut
-        # Note: test_dut is the start of a slow conversion from a Fuchsia-specific
-        # tool to an abstract_device tool. Only commands that use test_dut will work
-        # across device types; otherwise this tool primarily targets Fuchsia devices.
-        self.test_dut = create_bluetooth_device(self.pri_dut)
-        self.test_dut.initialize_bluetooth_controller()
-        self.target_device_name = target_device_name
-        self.log = log
-
-    def emptyline(self):
-        pass
-
-    def do_EOF(self, line):
-        "End Script"
-        return True
-
-    """ Useful Helper functions and cmd line tooling """
-
-    def str_to_bool(self, s):
-        if s.lower() == 'true':
-            return True
-        elif s.lower() == 'false':
-            return False
-
-    def _find_unique_id_over_le(self):
-        scan_filter = {"name_substring": self.target_device_name}
-        self.unique_mac_addr_id = None
-        self.pri_dut.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-        tries = 10
-        for i in range(tries):
-            time.sleep(self.bt_scan_poll_timer)
-            scan_res = self.pri_dut.sl4f.gattc_lib.bleGetDiscoveredDevices(
-            )['result']
-            for device in scan_res:
-                name, did, connectable = device["name"], device["id"], device[
-                    "connectable"]
-                if (self.target_device_name in name):
-                    self.unique_mac_addr_id = did
-                    self.log.info(
-                        "Successfully found device: name, id: {}, {}".format(
-                            name, did))
-                    break
-            if self.unique_mac_addr_id:
-                break
-        self.pri_dut.sl4f.gattc_lib.bleStopBleScan()
-
-    def _find_unique_id_over_bt_control(self):
-        self.unique_mac_addr_id = None
-        self.bt_control_devices = []
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(True)
-        tries = 10
-        for i in range(tries):
-            if self.unique_mac_addr_id:
-                break
-            time.sleep(self.bt_scan_poll_timer)
-            device_list = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for id_dict in device_list:
-                device = device_list[id_dict]
-                self.bt_control_devices.append(device)
-                name = None
-                if device['name'] is not None:
-                    name = device['name']
-                did, address = device['id'], device['address']
-
-                self.bt_control_ids.append(did)
-                if name is not None:
-                    self.bt_control_names.append(name)
-                    if self.target_device_name in name:
-                        self.unique_mac_addr_id = did
-                        self.log.info(
-                            "Successfully found device: name, id, address: {}, {}, {}"
-                            .format(name, did, address))
-                        break
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-
-    def do_tool_take_bt_snoop_log(self, custom_name):
-        """
-        Description: Takes the bt snoop log from the Fuchsia device.
-        Logs will show up in your config files' logpath directory.
-
-        Input(s):
-            custom_name: Optional. Override the default pcap file name.
-
-        Usage: tool_take_bt_snoop_log custom_name
-          Examples:
-            tool_take_bt_snoop_log connection_error
-            tool_take_bt_snoop_log
-        """
-        self.pri_dut.take_bt_snoop_log(custom_name)
-
-    def do_tool_refresh_unique_id(self, line):
-        """
-        Description: Refresh command line tool mac unique id.
-        Usage:
-          Examples:
-            tool_refresh_unique_id
-        """
-        try:
-            self._find_unique_id_over_le()
-        except Exception as err:
-            self.log.error(
-                "Failed to scan or find scan result: {}".format(err))
-
-    def do_tool_refresh_unique_id_using_bt_control(self, line):
-        """
-        Description: Refresh command line tool mac unique id.
-        Usage:
-          Examples:
-            tool_refresh_unique_id_using_bt_control
-        """
-        try:
-            self._find_unique_id_over_bt_control()
-        except Exception as err:
-            self.log.error(
-                "Failed to scan or find scan result: {}".format(err))
-
-    def do_tool_set_target_device_name(self, line):
-        """
-        Description: Reset the target device name.
-        Input(s):
-            device_name: Required. The advertising name to connect to.
-        Usage: tool_set_target_device_name new_target_device_name
-          Examples:
-            tool_set_target_device_name le_watch
-        """
-        self.log.info("Setting target_device_name to: {}".format(line))
-        self.target_device_name = line
-
-    def do_tool_set_unique_mac_addr_id(self, line):
-        """
-        Description: Sets the unique mac address id (Specific to Fuchsia)
-        Input(s):
-            device_id: Required. The id to set the unique mac address id to
-        Usage: tool_set_unique_mac_addr_id device_id
-          Examples:
-            tool_set_unique_mac_addr_id 7fb2cae53aad9e0d
-        """
-        self.unique_mac_addr_id = line
-
-    """Begin BLE advertise wrappers"""
-
-    def complete_ble_adv_data_include_name(self, text, line, begidx, endidx):
-        roles = ["true", "false"]
-        if not text:
-            completions = roles
-        else:
-            completions = [s for s in roles if s.startswith(text)]
-        return completions
-
-    def do_ble_adv_data_include_name(self, line):
-        cmd = "Include name in the advertisement."
-        try:
-            self.ble_adv_include_name = self.str_to_bool(line)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_set_name(self, line):
-        cmd = "Set the name to be included in the advertisement."
-        try:
-            self.ble_adv_name = line
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_ble_adv_data_set_appearance(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(sig_appearance_constants.keys())
-        else:
-            completions = [
-                s for s in sig_appearance_constants.keys()
-                if s.startswith(text)
-            ]
-        return completions
-
-    def do_ble_adv_data_set_appearance(self, line):
-        cmd = "Set the appearance to known SIG values."
-        try:
-            self.ble_adv_appearance = line
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_ble_adv_data_include_tx_power_level(self, text, line, begidx,
-                                                     endidx):
-        options = ['true', 'false']
-        if not text:
-            completions = list(options)[:]
-        else:
-            completions = [s for s in options if s.startswith(text)]
-        return completions
-
-    def do_ble_adv_data_include_tx_power_level(self, line):
-        """Include the tx_power_level in the advertising data.
-        Description: Adds tx_power_level to the BLE advertisement data.
-        Input(s):
-            value: Required. True or False
-        Usage: ble_adv_data_include_tx_power_level bool_value
-          Examples:
-            ble_adv_data_include_tx_power_level true
-            ble_adv_data_include_tx_power_level false
-        """
-        cmd = "Include tx_power_level in advertisement."
-        try:
-            self.ble_adv_data_include_tx_power_level = self.str_to_bool(line)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_ble_adv_include_scan_response(self, text, line, begidx,
-                                               endidx):
-        options = ['true', 'false']
-        if not text:
-            completions = list(options)[:]
-        else:
-            completions = [s for s in options if s.startswith(text)]
-        return completions
-
-    def do_ble_adv_include_scan_response(self, line):
-        """Include scan response in advertisement. inputs: [true|false]
-            Note: Currently just sets the scan response data to the
-                Advertisement data.
-        """
-        cmd = "Include tx_power_level in advertisement."
-        try:
-            self.ble_adv_include_scan_response = self.str_to_bool(line)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_add_manufacturer_data(self, line):
-        """Include manufacturer id and data to the advertisment
-        Description: Adds manufacturer data to the BLE advertisement.
-        Input(s):
-            id: Required. The int representing the manufacturer id.
-            data: Required. The string representing the data.
-        Usage: ble_adv_data_add_manufacturer_data id data
-          Examples:
-            ble_adv_data_add_manufacturer_data 1 test
-        """
-        cmd = "Include manufacturer id and data to the advertisment."
-        try:
-
-            info = line.split()
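-            # Lazily create the manufacturer data list on the first call.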
-            if self.ble_adv_data_manufacturer_data is None:
-                self.ble_adv_data_manufacturer_data = []
-            self.ble_adv_data_manufacturer_data.append({
-                "id": int(info[0]),
-                "data": info[1]
-            })
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_add_service_data(self, line):
-        """Include service data to the advertisment
-        Description: Adds service data to the BLE advertisement.
-        Input(s):
-            uuid: Required. The string representing the uuid.
-            data: Required. The string representing the data.
-        Usage: ble_adv_data_add_service_data uuid data
-          Examples:
-            ble_adv_data_add_service_data 00001801-0000-1000-8000-00805f9b34fb test
-        """
-        cmd = "Include manufacturer id and data to the advertisment."
-        try:
-            info = line.split()
-            if self.ble_adv_data_service_data is None:
-                self.ble_adv_data_service_data = []
-            self.ble_adv_data_service_data.append({
-                "uuid": info[0],
-                "data": info[1]
-            })
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_adv_add_service_uuid_list(self, line):
-        """Include a list of service uuids to the advertisment:
-        Description: Adds service uuid list to the BLE advertisement.
-        Input(s):
-            uuid: Required. A list of N string UUIDs to add.
-        Usage: ble_adv_add_service_uuid_list uuid0 uuid1 ... uuidN
-          Examples:
-            ble_adv_add_service_uuid_list 00001801-0000-1000-8000-00805f9b34fb
-            ble_adv_add_service_uuid_list 00001801-0000-1000-8000-00805f9b34fb 00001802-0000-1000-8000-00805f9b34fb
-        """
-        cmd = "Include service uuid list to the advertisment data."
-        try:
-            self.ble_adv_data_service_uuid_list = line
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_set_uris(self, uris):
-        """Set the URIs of the LE advertisement data:
-        Description: Adds a list of string URIs.
-          See (RFC 3986 1.1.2 https://tools.ietf.org/html/rfc3986)
-          Valid URI examples:
-            ftp://ftp.is.co.za/rfc/rfc1808.txt
-            http://www.ietf.org/rfc/rfc2396.txt
-            ldap://[2001:db8::7]/c=GB?objectClass?one
-            mailto:John.Doe@example.com
-            news:comp.infosystems.www.servers.unix
-            tel:+1-816-555-1212
-            telnet://192.0.2.16:80/
-            urn:oasis:names:specification:docbook:dtd:xml:4.1.2
-        Input(s):
-            uris: Required. A list of URIs to add.
-        Usage: ble_adv_data_set_uris uri0 uri1 ... uriN
-          Examples:
-            ble_adv_data_set_uris telnet://192.0.2.16:80/
-            ble_adv_data_set_uris tel:+1-816-555-1212
-        """
-        cmd = "Set the appearance to known SIG values."
-        try:
-            self.ble_adv_data_uris = uris.split()
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def start_advertisement(self, connectable):
-        """ Handle setting advertising data and the advertisement
-            Note: After advertisement is successful, clears values set for
-                * Manufacturer data
-                * Appearance information
-                * Scan Response
-                * Service UUIDs
-                * URI list
-            Args:
-                connectable: Bool of whether to start a connectable
-                    advertisement or not.
-        """
-        adv_data_name = self.ble_adv_name
-        if not self.ble_adv_include_name:
-            adv_data_name = None
-
-        manufacturer_data = self.ble_adv_data_manufacturer_data
-
-        tx_power_level = None
-        if self.ble_adv_data_include_tx_power_level:
-            tx_power_level = 1  # Not yet implemented so set to 1
-
-
-        adv_data = {
-            "name": adv_data_name,
-            "appearance": self.ble_adv_appearance,
-            "service_data": self.ble_adv_data_service_data,
-            "tx_power_level": tx_power_level,
-            "service_uuids": self.ble_adv_data_service_uuid_list,
-            "manufacturer_data": manufacturer_data,
-            "uris": self.ble_adv_data_uris,
-        }
-
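-        # Reuse the advertising data as the scan response payload when enabled.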
-        if not self.ble_adv_include_scan_response:
-            scan_response = None
-        else:
-            scan_response = adv_data
-
-        result = self.pri_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, self.ble_adv_interval, connectable)
-        self.log.info("Result of starting advertisement: {}".format(result))
-        self.ble_adv_data_manufacturer_data = None
-        self.ble_adv_appearance = None
-        self.ble_adv_include_scan_response = False
-        self.ble_adv_data_service_uuid_list = None
-        self.ble_adv_data_uris = None
-        self.ble_adv_data_service_data = None
-
-    def do_ble_start_generic_connectable_advertisement(self, line):
-        """
-        Description: Start a connectable LE advertisement
-
-        Usage: ble_start_generic_connectable_advertisement
-        """
-        cmd = "Start a connectable LE advertisement"
-        try:
-            connectable = True
-            self.start_advertisement(connectable)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_start_generic_nonconnectable_advertisement(self, line):
-        """
-        Description: Start a non-connectable LE advertisement
-
-        Usage: ble_start_generic_nonconnectable_advertisement
-        """
-        cmd = "Start a nonconnectable LE advertisement"
-        try:
-            connectable = False
-            self.start_advertisement(connectable)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_stop_advertisement(self, line):
-        """
-        Description: Stop a BLE advertisement.
-        Usage: ble_stop_advertisement
-        """
-        cmd = "Stop a connectable LE advertisement"
-        try:
-            self.pri_dut.sl4f.ble_lib.bleStopBleAdvertising()
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End BLE advertise wrappers"""
-    """Begin GATT client wrappers"""
-
-    def complete_gattc_connect_by_id(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(self.le_ids)[:]
-        else:
-            completions = [s for s in self.le_ids if s.startswith(text)]
-        return completions
-
-    def do_gattc_connect_by_id(self, line):
-        """
-        Description: Connect to a LE peripheral.
-        Input(s):
-            device_id: Required. The unique device ID from Fuchsia
-                discovered devices.
-        Usage:
-          Examples:
-            gattc_connect device_id
-        """
-        cmd = "Connect to a LE peripheral by input ID."
-        try:
-            connection_status = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-                line)
-            self.log.info("Connection status: {}".format(
-                pprint.pformat(connection_status)))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_connect(self, line):
-        """
-        Description: Connect to a LE peripheral.
-        Optional input: device_name
-        Input(s):
-            device_name: Optional. The peripheral name to connect to.
-        Usage:
-          Examples:
-            gattc_connect
-            gattc_connect eddystone_123
-        """
-        cmd = "Connect to a LE peripheral."
-        try:
-            if len(line) > 0:
-                self.target_device_name = line
-                self.unique_mac_addr_id = None
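-            # Scan for the target device name when no unique id has been cached yet.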
-            if not self.unique_mac_addr_id:
-                try:
-                    self._find_unique_id()
-                except Exception as err:
-                    self.log.info("Failed to scan or find device.")
-                    return
-            connection_status = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-                self.unique_mac_addr_id)
-            self.log.info("Connection status: {}".format(
-                pprint.pformat(connection_status)))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_connect_disconnect_iterations(self, line):
-        """
-        Description: Connect then disconnect to a LE peripheral multiple times.
-        Input(s):
-            iterations: Required. The number of iterations to run.
-        Usage:
-          Examples:
-            gattc_connect_disconnect_iterations 10
-        """
-        cmd = "Connect to a LE peripheral."
-        try:
-            if not self.unique_mac_addr_id:
-                try:
-                    self._find_unique_id()
-                except Exception as err:
-                    self.log.info("Failed to scan or find device.")
-                    return
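-            # Alternate connect and disconnect for the requested number of iterations.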
-            for i in range(int(line)):
-                self.log.info("Running iteration {}".format(i + 1))
-                connection_status = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-                    self.unique_mac_addr_id)
-                self.log.info("Connection status: {}".format(
-                    pprint.pformat(connection_status)))
-                time.sleep(4)
-                disc_status = self.pri_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-                    self.unique_mac_addr_id)
-                self.log.info("Disconnect status: {}".format(disc_status))
-                time.sleep(3)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_disconnect(self, line):
-        """
-        Description: Disconnect from LE peripheral.
-        Assumptions: Already connected to a peripheral.
-        Usage:
-          Examples:
-            gattc_disconnect
-        """
-        cmd = "Disconenct from LE peripheral."
-        try:
-            disconnect_status = self.pri_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-                self.unique_mac_addr_id)
-            self.log.info("Disconnect status: {}".format(disconnect_status))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_list_services(self, discover_chars):
-        """
-        Description: List services from LE peripheral.
-        Assumptions: Already connected to a peripheral.
-        Input(s):
-            discover_chars: Optional. An optional input to discover all
-                characteristics on the service.
-        Usage:
-          Examples:
-            gattc_list_services
-            gattc_list_services true
-        """
-        cmd = "List services from LE peripheral."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            self.log.info("Discovered Services: \n{}".format(
-                pprint.pformat(services)))
-            if discover_chars and self.str_to_bool(discover_chars):
-                for service in services.get('result'):
-                    self.pri_dut.sl4f.gattc_lib.connectToService(
-                        self.unique_mac_addr_id, service.get('id'))
-                    chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics(
-                    )
-                    self.log.info("Discovered chars:\n{}".format(
-                        pprint.pformat(chars)))
-
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_connect_to_service(self, line):
-        """
-        Description: Connect to Peripheral GATT server service.
-        Assumptions: Already connected to peripheral.
-        Input(s):
-            service_id: Required. The service id reference on the GATT server.
-        Usage:
-          Examples:
-            gattc_connect_to_service service_id
-        """
-        cmd = "GATT client connect to GATT server service."
-        try:
-            self.pri_dut.sl4f.gattc_lib.connectToService(
-                self.unique_mac_addr_id, int(line))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_discover_characteristics(self, line):
-        """
-        Description: Discover characteristics from a connected service.
-        Assumptions: Already connected to a GATT server service.
-        Usage:
-          Examples:
-            gattc_discover_characteristics
-        """
-        cmd = "Discover and list characteristics from a GATT server."
-        try:
-            chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-            self.log.info("Discovered chars:\n{}".format(
-                pprint.pformat(chars)))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_notify_all_chars(self, line):
-        """
-        Description: Enable all notifications on all Characteristics on
-            a GATT server.
-        Assumptions: Basic GATT connection made.
-        Usage:
-          Examples:
-            gattc_notify_all_chars
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
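-            # Walk every service and characteristic, enabling notifications on each.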
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    print("found uuid {}".format(char_uuid))
-                    try:
-                        self.pri_dut.sl4f.gattc_lib.enableNotifyCharacteristic(
-                            char_id)
-                    except Exception as err:
-                        print("error enabling notification")
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_chars(self, line):
-        """
-        Description: Read all Characteristic values from a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Usage:
-          Examples:
-            gattc_read_all_chars
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        read_val =  \
-                            self.pri_dut.sl4f.gattc_lib.readCharacteristicById(
-                                char_id)
-                        print("  Characteristic uuid / Value: {} / {}".format(
-                            char_uuid, read_val['result']))
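-                        # Also render the raw byte values as a character string.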
-                        str_value = ""
-                        for val in read_val['result']:
-                            str_value += chr(val)
-                        print("    str val: {}".format(str_value))
-                    except Exception as err:
-                        print(err)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_desc(self, line):
-        """
-        Description: Read all Descriptors values from a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Usage:
-          Examples:
-            gattc_read_all_desc
-        """
-        cmd = "Read all descriptors from the GATT service."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    print("  Reading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                        try:
-                            read_val = self.pri_dut.sl4f.gattc_lib.readDescriptorById(
-                                desc_id)
-                            print("    Descriptor uuid / Value: {} / {}".format(
-                                desc_uuid, read_val['result']))
-                        except Exception as err:
-                            pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_all_desc(self, line):
-        """
-        Description: Write a value to all Descriptors on the GATT server.
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start writing to.
-            size: Required. The size of bytes to write (value will be generated).
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_all_desc 0 100
-            gattc_write_all_desc 10 2
-        """
-        cmd = "Read all descriptors from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            size = args[1]
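-            # Build a deterministic payload: [0, 1, 2, ...] modulo 256.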
-            write_value = []
-            for i in range(int(size)):
-                write_value.append(i % 256)
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Writing descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    print("  Reading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                        try:
-                            write_val = self.pri_dut.sl4f.gattc_lib.writeDescriptorById(
-                                desc_id, offset, write_value)
-                            print("    Descriptor uuid / Result: {} / {}".format(
-                                desc_uuid, write_val['result']))
-                        except Exception as err:
-                            pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_long_desc(self, line):
-        """
-        Description: Read all long Characteristic Descriptors
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start reading from.
-            max_bytes: Required. The max size of bytes to return.
-        Usage:
-          Examples:
-            gattc_read_all_long_desc 0 100
-            gattc_read_all_long_desc 10 20
-        """
-        cmd = "Read all long descriptors from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            max_bytes = int(args[1])
-            services = self.pri_dut.sl4f.ble_lib.bleListServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    print("  Reading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                        try:
-                            read_val = self.pri_dut.sl4f.gattc_lib.readLongDescriptorById(
-                                desc_id, offset, max_bytes)
-                            print("    Descriptor uuid / Result: {} / {}".format(
-                                desc_uuid, read_val['result']))
-                        except Exception as err:
-                            pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_long_char(self, line):
-        """
-        Description: Read all long Characteristic
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start reading from.
-            max_bytes: Required. The max size of bytes to return.
-        Usage:
-          Examples:
-            gattc_read_all_long_char 0 100
-            gattc_read_all_long_char 10 20
-        """
-        cmd = "Read all long Characteristics from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            max_bytes = int(args[1])
-            services = self.pri_dut.sl4f.ble_lib.bleListServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        read_val = self.pri_dut.sl4f.gattc_lib.readLongCharacteristicById(
-                            char_id, offset, max_bytes)
-                        print("    Char uuid / Result: {} / {}".format(
-                            char_uuid, read_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_all_chars(self, line):
-        """
-        Description: Write all characteristic values to a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start writing on.
-            size: The write value size (value will be generated)
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_all_chars 0 10
-            gattc_write_all_chars 10 1
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            size = int(args[1])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Writing chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        write_result = self.pri_dut.sl4f.gattc_lib.writeCharById(
-                            char_id, offset, write_value)
-                        print("  Characteristic uuid write result: {} / {}".
-                              format(char_uuid, write_result['result']))
-                    except Exception as err:
-                        print("error writing char {}".format(err))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_all_chars_without_response(self, line):
-        """
-        Description: Write all characteristic values to a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            size: The write value size (value will be generated).
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_all_chars_without_response 100
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 1:
-                self.log.info("1 Arguments required: [Size]")
-                return
-            size = int(args[0])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        write_result = \
-                            self.pri_dut.sl4f.gattc_lib.writeCharByIdWithoutResponse(
-                                char_id, write_value)
-                        print("  Characteristic uuid write result: {} / {}".
-                              format(char_uuid, write_result['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_char_by_id(self, line):
-        """
-        Description: Write char by characteristic id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use
-            size: Function will generate a byte array of the input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_char_by_id char_id 0 5
-            gattc_write_char_by_id char_id 20 1
-        """
-        cmd = "Write to GATT server characteristic ."
-        try:
-            args = line.split()
-            if len(args) != 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = int(args[2])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_characteristic_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_long_char_by_id(self, line):
-        """
-        Description: Write long char by characteristic id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use
-            size: Function will generate a byte array of the input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-            reliable_mode: Optional: Reliable writes represented as bool
-        Usage:
-          Examples:
-            gattc_write_long_char_by_id char_id 0 5
-            gattc_write_long_char_by_id char_id 20 1
-            gattc_write_long_char_by_id char_id 20 1 true
-            gattc_write_long_char_by_id char_id 20 1 false
-        """
-        cmd = "Long Write to GATT server characteristic ."
-        try:
-            args = line.split()
-            if len(args) < 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = int(args[2])
-            reliable_mode = False
-            if len(args) > 3:
-                reliable_mode = self.str_to_bool(args[3])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_long_characteristic_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value,
-                reliable_mode)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_long_desc_by_id(self, line):
-        """
-        Description: Write long descriptor by descriptor id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            descriptor_id: The descriptor id reference on the GATT
-                service
-            offset: The offset value to use
-            size: Function will generate a byte array of the input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_long_desc_by_id desc_id 0 5
-            gattc_write_long_desc_by_id desc_id 20 1
-        """
-        cmd = "Long Write to GATT server descriptor ."
-        try:
-            args = line.split()
-            if len(args) != 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = int(args[2])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_long_descriptor_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_char_by_id_without_response(self, line):
-        """
-        Description: Write char by characteristic id reference without response.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            size: Function will generate a byte array of the input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_char_by_id_without_response char_id 5
-        """
-        cmd = "Write characteristic by id without response."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Id] [Size]")
-                return
-            id = int(args[0], 16)
-            size = args[1]
-            write_value = []
-            for i in range(int(size)):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_characteristic_without_response_by_handle(
-                self.unique_mac_addr_id, id, write_value)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_enable_notify_char_by_id(self, line):
-        """
-        Description: Enable Characteristic notification on Characteristic ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_enable_notify_char_by_id char_id
-        """
-        cmd = "Enable notifications by Characteristic id."
-        try:
-            id = int(line, 16)
-            self.test_dut.gatt_client_enable_notifiy_characteristic_by_handle(
-                self.unique_mac_addr_id, id)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_disable_notify_char_by_id(self, line):
-        """
-        Description: Disable Characteristic notification on Characteristic ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_disable_notify_char_by_id char_id
-        """
-        cmd = "Disable notify Characteristic by id."
-        try:
-            id = int(line, 16)
-            self.test_dut.gatt_client_disable_notifiy_characteristic_by_handle(
-                self.unique_mac_addr_id, id)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_char_by_id(self, line):
-        """
-        Description: Read Characteristic by ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_read_char_by_id char_id
-        """
-        cmd = "Read Characteristic value by ID."
-        try:
-            id = int(line, 16)
-            read_val = self.test_dut.gatt_client_read_characteristic_by_handle(
-                self.unique_mac_addr_id, id)
-            self.log.info("Characteristic Value with id {}: {}".format(
-                id, read_val))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_char_by_uuid(self, characteristic_uuid):
-        """
-        Description: Read Characteristic by UUID (read by type).
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_uuid: The characteristic uuid reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_read_char_by_uuid <characteristic_uuid>
-        """
-        cmd = "Read Characteristic value by ID."
-        try:
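-            # Expand a 4-character short UUID into a full UUID using BASE_UUID.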
-            short_uuid_len = 4
-            if len(characteristic_uuid) == short_uuid_len:
-                characteristic_uuid = BASE_UUID.format(characteristic_uuid)
-
-            read_val = self.test_dut.gatt_client_read_characteristic_by_uuid(
-                self.unique_mac_addr_id, characteristic_uuid)
-            self.log.info("Characteristic Value with id {}: {}".format(
-                id, read_val))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_desc_by_id(self, line):
-        """
-        Description: Write Descriptor by descriptor id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            descriptor_id: The Descriptor id reference on the GATT service
-            offset: The offset value to use
-            size: Function will generate a byte array of the input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_desc_by_id desc_id 0 5
-            gattc_write_desc_by_id desc_id 20 1
-        """
-        cmd = "Write Descriptor by id."
-        try:
-            args = line.split()
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = args[2]
-            write_value = []
-            for i in range(int(size)):
-                write_value.append(i % 256)
-            write_result = self.test_dut.gatt_client_write_descriptor_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value)
-            self.log.info("Descriptor Write result {}: {}".format(
-                id, write_result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_desc_by_id(self, line):
-        """
-        Description: Read Descriptor by ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            descriptor_id: The Descriptor id reference on the GATT service
-        Usage:
-          Examples:
-            gattc_read_desc_by_id desc_id
-        """
-        cmd = "Read Descriptor by ID."
-        try:
-            id = int(line, 16)
-            read_val = self.test_dut.gatt_client_read_descriptor_by_handle(
-                self.unique_mac_addr_id, id)
-            self.log.info("Descriptor Value with id {}: {}".format(
-                id, read_val))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_long_char_by_id(self, line):
-        """
-        Description: Read long Characteristic value by id.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use.
-            max_bytes: The max bytes size to return.
-        Usage:
-          Examples:
-            gattc_read_long_char_by_id char_id 0 10
-            gattc_read_long_char_by_id char_id 20 1
-        """
-        cmd = "Read long Characteristic value by id."
-        try:
-            args = line.split()
-            if len(args) != 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            max_bytes = int(args[2])
-            read_val = self.test_dut.gatt_client_read_long_characteristic_by_handle(
-                self.unique_mac_addr_id, id, offset, max_bytes)
-            self.log.info("Characteristic Value with id {}: {}".format(
-                id, read_val['result']))
-
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End GATT client wrappers"""
-    """Begin LE scan wrappers"""
-
-    def _update_scan_results(self, scan_results):
-        self.le_ids = []
-        for scan in scan_results['result']:
-            self.le_ids.append(scan['id'])
-
-    def do_ble_start_scan(self, line):
-        """
-        Description: Perform a BLE scan.
-        Default filter name: ""
-        Optional input: filter_device_name
-        Usage:
-          Examples:
-            ble_start_scan
-            ble_start_scan eddystone
-        """
-        cmd = "Perform a BLE scan and list discovered devices."
-        try:
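-            # An empty name substring matches every advertising device.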
-            scan_filter = {"name_substring": ""}
-            if line:
-                scan_filter = {"name_substring": line}
-            self.pri_dut.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_stop_scan(self, line):
-        """
-        Description: Stops a BLE scan and returns discovered devices.
-        Usage:
-          Examples:
-            ble_stop_scan
-        """
-        cmd = "Stops a BLE scan and returns discovered devices."
-        try:
-            scan_results = self.pri_dut.sl4f.gattc_lib.bleStopBleScan()
-            self._update_scan_results(scan_results)
-            self.log.info(pprint.pformat(scan_results))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_get_discovered_devices(self, line):
-        """
-        Description: Get discovered LE devices of an active scan.
-        Usage:
-          Examples:
-            ble_get_discovered_devices
-        """
-        cmd = "Get discovered LE devices of an active scan."
-        try:
-            scan_results = self.pri_dut.sl4f.gattc_lib.bleGetDiscoveredDevices(
-            )
-            self._update_scan_results(scan_results)
-            self.log.info(pprint.pformat(scan_results))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End LE scan wrappers"""
-    """Begin GATT Server wrappers"""
-
-    def do_gatts_close(self, line):
-        """
-        Description: Close active GATT server.
-
-        Usage:
-          Examples:
-            gatts_close
-        """
-        cmd = "Close active GATT server."
-        try:
-            result = self.pri_dut.sl4f.gatts_lib.closeServer()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_gatts_setup_database(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(
-                gatt_test_database.GATT_SERVER_DB_MAPPING.keys())
-        else:
-            completions = [
-                s for s in gatt_test_database.GATT_SERVER_DB_MAPPING.keys()
-                if s.startswith(text)
-            ]
-        return completions
-
-    def do_gatts_setup_database(self, line):
-        """
-        Description: Setup a Gatt server database based on pre-defined inputs.
-            Supports Tab Autocomplete.
-        Input(s):
-            descriptor_db_name: The descriptor db name that matches one in
-                acts_contrib.test_utils.bt.gatt_test_database
-        Usage:
-          Examples:
-            gatts_setup_database LARGE_DB_1
-        """
-        cmd = "Setup GATT Server Database Based of pre-defined dictionaries"
-        try:
-            result = self.pri_dut.sl4f.gatts_lib.publishServer(
-                gatt_test_database.GATT_SERVER_DB_MAPPING.get(line))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End GATT Server wrappers"""
-    """Begin Bluetooth Controller wrappers"""
-
-    def complete_btc_pair(self, text, line, begidx, endidx):
-        """ Provides auto-complete for btc_pair cmd.
-
-        See Cmd module for full description.
-        """
-        arg_completion = len(line.split(" ")) - 1
-        pairing_security_level_options = ['ENCRYPTED', 'AUTHENTICATED', 'NONE']
-        bondable_options = ['BONDABLE', 'NON_BONDABLE', 'NONE']
-        transport_options = ['BREDR', 'LE']
-        if arg_completion == 1:
-            if not text:
-                completions = pairing_security_level_options
-            else:
-                completions = [
-                    s for s in pairing_security_level_options
-                    if s.startswith(text)
-                ]
-            return completions
-        if arg_completion == 2:
-            if not text:
-                completions = bondable_options
-            else:
-                completions = [
-                    s for s in bondable_options if s.startswith(text)
-                ]
-            return completions
-        if arg_completion == 3:
-            if not text:
-                completions = transport_options
-            else:
-                completions = [
-                    s for s in transport_options if s.startswith(text)
-                ]
-            return completions
-
-    def do_btc_pair(self, line):
-        """
-        Description: Sends an outgoing pairing request.
-
-        Input(s):
-            pairing security level: ENCRYPTED, AUTHENTICATED, or NONE
-            bondable: BONDABLE, NON_BONDABLE, or NONE
-            transport: BREDR or LE
-
-        Usage:
-          Examples:
-            btc_pair NONE NONE BREDR
-            btc_pair ENCRYPTED NONE LE
-            btc_pair AUTHENTICATED NONE LE
-            btc_pair NONE NON_BONDABLE BREDR
-        """
-        cmd = "Send an outgoing pairing request."
-        pairing_security_level_mapping = {
-            "ENCRYPTED": 1,
-            "AUTHENTICATED": 2,
-            "NONE": None,
-        }
-
-        bondable_mapping = {
-            "BONDABLE": True,
-            "NON_BONDABLE": False,
-            "NONE": None,
-        }
-
-        transport_mapping = {
-            "BREDR": 1,
-            "LE": 2,
-        }
-
-        try:
-            options = line.split(" ")
-            result = self.test_dut.init_pair(
-                self.unique_mac_addr_id,
-                pairing_security_level_mapping.get(options[0]),
-                bondable_mapping.get(options[1]),
-                transport_mapping.get(options[2]),
-            )
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_btc_accept_pairing(self, text, line, begidx, endidx):
-        """ Provides auto-complete for btc_set_io_capabilities cmd.
-
-        See Cmd module for full description.
-        """
-        arg_completion = len(line.split(" ")) - 1
-        input_options = ['NONE', 'CONFIRMATION', 'KEYBOARD']
-        output_options = ['NONE', 'DISPLAY']
-        if arg_completion == 1:
-            if not text:
-                completions = input_options
-            else:
-                completions = [s for s in input_options if s.startswith(text)]
-            return completions
-        if arg_completion == 2:
-            if not text:
-                completions = output_options
-            else:
-                completions = [s for s in output_options if s.startswith(text)]
-            return completions
-
-    def do_btc_accept_pairing(self, line):
-        """
-        Description: Accept all incoming pairing requests.
-
-        Input(s):
-            input: String - The input I/O capabilities to use
-                Available Values:
-                NONE - Input capability type None
-                CONFIRMATION - Input capability type confirmation
-                KEYBOARD - Input capability type Keyboard
-            output: String - The output I/O Capabilities to use
-                Available Values:
-                NONE - Output capability type None
-                DISPLAY - output capability type Display
-
-        Usage:
-          Examples:
-            btc_accept_pairing
-            btc_accept_pairing NONE DISPLAY
-            btc_accept_pairing NONE NONE
-            btc_accept_pairing KEYBOARD DISPLAY
-        """
-        cmd = "Accept incoming pairing requests"
-        try:
-            input_capabilities = "NONE"
-            output_capabilities = "NONE"
-            options = line.split(" ")
-            if len(options) > 1:
-                input_capabilities = options[0]
-                output_capabilities = options[1]
-            result = self.pri_dut.sl4f.bts_lib.acceptPairing(
-                input_capabilities, output_capabilities)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_forget_device(self, line):
-        """
-        Description: Forget pairing of the current device under test.
-            Current device under test is the device found by
-            tool_refresh_unique_id from custom user param. This function
-            will also perform a clean disconnect if actively connected.
-
-        Usage:
-          Examples:
-            btc_forget_device
-        """
-        cmd = "For pairing of the current device under test."
-        try:
-            self.log.info("Forgetting device id: {}".format(
-                self.unique_mac_addr_id))
-            result = self.pri_dut.sl4f.bts_lib.forgetDevice(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_set_discoverable(self, discoverable):
-        """
-        Description: Change Bluetooth Controller discoverability.
-        Input(s):
-            discoverable: true to set discoverable
-                          false to set non-discoverable
-        Usage:
-          Examples:
-            btc_set_discoverable true
-            btc_set_discoverable false
-        """
-        cmd = "Change Bluetooth Controller discoverablility."
-        try:
-            result = self.test_dut.set_discoverable(
-                self.str_to_bool(discoverable))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_set_name(self, name):
-        """
-        Description: Change Bluetooth Controller local name.
-        Input(s):
-            name: The name to set the Bluetooth Controller name to.
-
-        Usage:
-          Examples:
-            btc_set_name fs_test
-        """
-        cmd = "Change Bluetooth Controller local name."
-        try:
-            result = self.test_dut.set_bluetooth_local_name(name)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_request_discovery(self, discover):
-        """
-        Description: Change whether the Bluetooth Controller is in active
-            discovery or not.
-        Input(s):
-            discover: true to start discovery
-                      false to end discovery
-        Usage:
-          Examples:
-            btc_request_discovery true
-            btc_request_discovery false
-        """
-        cmd = "Change whether the Bluetooth Controller is in active."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.requestDiscovery(
-                self.str_to_bool(discover))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_get_known_remote_devices(self, line):
-        """
-        Description: Get a list of known devices.
-
-        Usage:
-          Examples:
-            btc_get_known_remote_devices
-        """
-        cmd = "Get a list of known devices."
-        self.bt_control_devices = []
-        try:
-            device_list = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for id_dict in device_list:
-                device = device_list[id_dict]
-                self.bt_control_devices.append(device)
-                self.log.info("Device found {}".format(device))
-
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_forget_all_known_devices(self, line):
-        """
-        Description: Forget all known devices.
-
-        Usage:
-          Examples:
-            btc_forget_all_known_devices
-        """
-        cmd = "Forget all known devices."
-        try:
-            device_list = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for device in device_list:
-                d = device_list[device]
-                if d['bonded'] or d['connected']:
-                    self.log.info("Unbonding deivce: {}".format(d))
-                    self.log.info(
-                        self.pri_dut.sl4f.bts_lib.forgetDevice(
-                            d['id'])['result'])
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_connect_device(self, line):
-        """
-        Description: Connect to device under test.
-            Device under test is specified by either user params
-            or
-                tool_set_target_device_name <name>
-                do_tool_refresh_unique_id_using_bt_control
-
-        Usage:
-          Examples:
-            btc_connect_device
-        """
-        cmd = "Connect to device under test."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.connectDevice(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
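-    # Python's cmd module looks up `complete_<command>(text, line, begidx,
-    # endidx)` to produce tab-completion candidates for `<command>`; the
-    # completers below simply filter the cached device ids/names by the typed
-    # prefix.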
-    def complete_btc_connect_device_by_id(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(self.bt_control_ids)[:]
-        else:
-            completions = [
-                s for s in self.bt_control_ids if s.startswith(text)
-            ]
-        return completions
-
-    def do_btc_connect_device_by_id(self, device_id):
-        """
-        Description: Connect to device id based on pre-defined inputs.
-            Supports Tab Autocomplete.
-        Input(s):
-            device_id: The device id to connect to.
-
-        Usage:
-          Examples:
-            btc_connect_device_by_id <device_id>
-        """
-        cmd = "Connect to device id based on pre-defined inputs."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.connectDevice(device_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_btc_connect_device_by_name(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(self.bt_control_names)[:]
-        else:
-            completions = [
-                s for s in self.bt_control_names if s.startswith(text)
-            ]
-        return completions
-
-    def do_btc_connect_device_by_name(self, device_name):
-        """
-        Description: Connect to a device by name based on pre-defined inputs.
-            Supports Tab Autocomplete.
-        Input(s):
-            device_name: The device name to connect to.
-
-        Usage:
-          Examples:
-            btc_connect_device_by_name <device_name>
-        """
-        cmd = "Connect to device name based on pre-defined inputs."
-        try:
-            for device in self.bt_control_devices:
-                if device_name == device['name']:
-
-                    result = self.pri_dut.sl4f.bts_lib.connectDevice(
-                        device['id'])
-                    self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_disconnect_device(self, line):
-        """
-        Description: Disconnect from the device under test.
-            Device under test is specified by either user params
-            or
-                tool_set_target_device_name <name>
-                do_tool_refresh_unique_id_using_bt_control
-
-        Usage:
-          Examples:
-            btc_disconnect_device
-        """
-        cmd = "Disconnect to device under test."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.disconnectDevice(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_init_bluetooth_control(self, line):
-        """
-        Description: Initialize the Bluetooth Controller.
-
-        Usage:
-          Examples:
-            btc_init_bluetooth_control
-        """
-        cmd = "Initialize the Bluetooth Controller."
-        try:
-            result = self.test_dut.initialize_bluetooth_controller()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_get_local_address(self, line):
-        """
-        Description: Get the local BR/EDR address of the Bluetooth Controller.
-
-        Usage:
-          Examples:
-            btc_get_local_address
-        """
-        cmd = "Get the local BR/EDR address of the Bluetooth Controller."
-        try:
-            result = self.test_dut.get_local_bluetooth_address()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_input_pairing_pin(self, line):
-        """
-        Description: Sends a pairing pin to SL4F's Bluetooth Control's
-        Pairing Delegate.
-
-        Usage:
-          Examples:
-            btc_input_pairing_pin 123456
-        """
-        cmd = "Input pairing pin to the Fuchsia device."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.inputPairingPin(line)['result']
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_get_pairing_pin(self, line):
-        """
-        Description: Gets the pairing pin from SL4F's Bluetooth Control's
-        Pairing Delegate.
-
-        Usage:
-          Examples:
-            btc_get_pairing_pin
-        """
-        cmd = "Get the pairing pin from the Fuchsia device."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.getPairingPin()['result']
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End Bluetooth Control wrappers"""
-    """Begin Profile Server wrappers"""
-
-    def do_sdp_pts_example(self, num_of_records):
-        """
-        Description: An example of how to set up a generic SDP record
-            and SDP search capabilities. This example will pass a few
-            SDP tests.
-
-        Input(s):
-            num_of_records: The number of records to add.
-
-        Usage:
-          Examples:
-            sdp_pts_example 1
-            sdp_pts_example 10
-        """
-        cmd = "Setup SDP for PTS testing."
-
-        attributes = [
-            bt_attribute_values['ATTR_PROTOCOL_DESCRIPTOR_LIST'],
-            bt_attribute_values['ATTR_SERVICE_CLASS_ID_LIST'],
-            bt_attribute_values['ATTR_BLUETOOTH_PROFILE_DESCRIPTOR_LIST'],
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-        ]
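-        # Each addSearch call below registers an SDP search for one
-        # service-class UUID, requesting the attribute IDs listed above from
-        # matching peers.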
-
-        try:
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['AudioSource'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['A/V_RemoteControl'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['PANU'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['SerialPort'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['DialupNetworking'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['OBEXObjectPush'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['OBEXFileTransfer'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['Headset'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['HandsfreeAudioGateway'],
-                                16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['Handsfree'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['SIM_Access'], 16))
-            for i in range(int(num_of_records)):
-                result = self.pri_dut.sl4f.sdp_lib.addService(
-                    sdp_pts_record_list[i])
-                self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_sdp_cleanup(self, line):
-        """
-        Description: Cleanup any existing SDP records
-
-        Usage:
-          Examples:
-            sdp_cleanup
-        """
-        cmd = "Cleanup SDP objects."
-        try:
-            result = self.pri_dut.sl4f.sdp_lib.cleanUp()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_sdp_init(self, line):
-        """
-        Description: Init the profile proxy for setting up SDP records
-
-        Usage:
-          Examples:
-            sdp_init
-        """
-        cmd = "Initialize profile proxy objects for adding SDP records"
-        try:
-            result = self.pri_dut.sl4f.sdp_lib.init()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_sdp_connect_l2cap(self, line):
-        """
-        Description: Send an L2CAP connection request over an input PSM value.
-
-        Note: Must be already connected to a peer.
-
-        Input(s):
-            psm: The PSM to connect over, as a hex value. Available PSMs:
-                SDP 0x0001  See Bluetooth Service Discovery Protocol (SDP)
-                RFCOMM  0x0003  See RFCOMM with TS 07.10
-                TCS-BIN 0x0005  See Bluetooth Telephony Control Specification /
-                    TCS Binary
-                TCS-BIN-CORDLESS    0x0007  See Bluetooth Telephony Control
-                    Specification / TCS Binary
-                BNEP    0x000F  See Bluetooth Network Encapsulation Protocol
-                HID_Control 0x0011  See Human Interface Device
-                HID_Interrupt   0x0013  See Human Interface Device
-                UPnP    0x0015  See [ESDP]
-                AVCTP   0x0017  See Audio/Video Control Transport Protocol
-                AVDTP   0x0019  See Audio/Video Distribution Transport Protocol
-                AVCTP_Browsing  0x001B  See Audio/Video Remote Control Profile
-                UDI_C-Plane 0x001D  See the Unrestricted Digital Information
-                    Profile [UDI]
-                ATT 0x001F  See Bluetooth Core Specification
-                3DSP    0x0021  See 3D Synchronization Profile.
-                LE_PSM_IPSP 0x0023  See Internet Protocol Support Profile
-                    (IPSP)
-                OTS 0x0025  See Object Transfer Service (OTS)
-                EATT    0x0027  See Bluetooth Core Specification
-            mode: String - The channel mode to connect to. Available values:
-                Basic mode: BASIC
-                Enhanced Retransmission mode: ERTM
-
-        Usage:
-          Examples:
-            sdp_connect_l2cap 0001 BASIC
-            sdp_connect_l2cap 0019 ERTM
-        """
-        cmd = "Connect l2cap"
-        try:
-            info = line.split()
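-            # The PSM argument is a hex string (e.g. "0019"), so parse it with
-            # base 16: int("0019", 16) == 25, the AVDTP PSM. The mode string
-            # ("BASIC" or "ERTM") is passed through unchanged.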
-            result = self.pri_dut.sl4f.sdp_lib.connectL2cap(
-                self.unique_mac_addr_id, int(info[0], 16), info[1])
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End Profile Server wrappers"""
-    """Begin AVDTP wrappers"""
-
-    def do_avdtp_init(self, initiator_delay):
-        """
-        Description: Start the A2DP component and initialize the AVDTP
-            service.
-
-        Input(s):
-            initiator_delay: [Optional] The stream initiator delay to set in
-            milliseconds.
-
-        Usage:
-          Examples:
-            avdtp_init 0
-            avdtp_init 2000
-            avdtp_init
-        """
-        cmd = "Initialize AVDTP proxy"
-        try:
-            if not initiator_delay:
-                initiator_delay = None
-            result = self.pri_dut.sl4f.avdtp_lib.init(initiator_delay)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_kill_a2dp(self, line):
-        """
-        Description: Quickly kill any A2DP components.
-
-        Usage:
-          Examples:
-            avdtp_kill_a2dp
-        """
-        cmd = "Kill A2DP service"
-        try:
-            self.pri_dut.start_v1_component("bt-a2dp")
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_connected_peers(self, line):
-        """
-        Description: Get the connected peers for the AVDTP service
-
-        Usage:
-          Examples:
-            avdtp_get_connected_peers
-        """
-        cmd = "AVDTP get connected peers"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getConnectedPeers()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_set_configuration(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: set configuration
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_set_configuration <peer_id>
-        """
-        cmd = "Send AVDTP set configuration to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.setConfiguration(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_configuration(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: get configuration
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_get_configuration <peer_id>
-        """
-        cmd = "Send AVDTP get configuration to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getConfiguration(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_capabilities(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: get capabilities
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_get_capabilities <peer_id>
-        """
-        cmd = "Send AVDTP get capabilities to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getCapabilities(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_all_capabilities(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: get all capabilities
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_get_all_capabilities <peer_id>
-        """
-        cmd = "Send AVDTP get all capabilities to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getAllCapabilities(
-                int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_reconfigure_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: reconfigure stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_reconfigure_stream <peer_id>
-        """
-        cmd = "Send AVDTP reconfigure stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.reconfigureStream(
-                int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_suspend_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: suspend stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_suspend_stream <peer_id>
-        """
-        cmd = "Send AVDTP suspend stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.suspendStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_suspend_reconfigure(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: suspend reconfigure
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_suspend_reconfigure <peer_id>
-        """
-        cmd = "Send AVDTP suspend reconfigure to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.suspendAndReconfigure(
-                int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_release_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: release stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_release_stream <peer_id>
-        """
-        cmd = "Send AVDTP release stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.releaseStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_establish_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: establish stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_establish_stream <peer_id>
-        """
-        cmd = "Send AVDTP establish stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.establishStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_start_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: start stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_start_stream <peer_id>
-        """
-        cmd = "Send AVDTP start stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.startStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_abort_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: abort stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_abort_stream <peer_id>
-        """
-        cmd = "Send AVDTP abort stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.abortStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_remove_service(self, line):
-        """
-        Description: Removes the AVDTP service in use.
-
-        Usage:
-          Examples:
-            avdtp_remove_service
-        """
-        cmd = "Remove AVDTP service"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.removeService()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End AVDTP wrappers"""
-    """Begin Audio wrappers"""
-
-    def do_audio_start_output_save(self, line):
-        """
-        Description: Start audio output save
-
-        Usage:
-          Examples:
-            audio_start_output_save
-        """
-        cmd = "Start audio capture"
-        try:
-            result = self.pri_dut.sl4f.audio_lib.startOutputSave()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_audio_stop_output_save(self, line):
-        """
-        Description: Stop audio output save
-
-        Usage:
-          Examples:
-            audio_stop_output_save
-        """
-        cmd = "Stop audio capture"
-        try:
-            result = self.pri_dut.sl4f.audio_lib.stopOutputSave()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_audio_get_output_audio(self, line):
-        """
-        Description: Get the audio output saved to a local file
-
-        Usage:
-          Examples:
-            audio_get_output_audio
-        """
-        cmd = "Get audio capture"
-        try:
-            save_path = "{}/{}".format(self.pri_dut.log_path, "audio.raw")
-            result = self.pri_dut.sl4f.audio_lib.getOutputAudio(save_path)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_audio_5_min_test(self, line):
-        """
-        Description: Capture and analyze sine audio waves played from a Bluetooth A2DP
-        Source device.
-
-        Pre steps:
-        1. Pair A2DP source device
-        2. Prepare generated SOX file over preferred codec on source device.
-            Quick way to generate necessary audio files:
-            sudo apt-get install sox
-            sox -b 16 -r 48000 -c 2 -n audio_file_2k1k_5_min.wav synth 300 sine 2000 sine 3000
-
-        Usage:
-          Examples:
-            audio_5_min_test
-        """
-        cmd = "5 min audio capture test"
-        input("Press Enter once Source device is streaming audio file")
-        try:
-            result = self.pri_dut.sl4f.audio_lib.startOutputSave()
-            self.log.info(result)
-            for i in range(5):
-                print("Minutes left: {}".format(10 - i))
-                time.sleep(60)
-            result = self.pri_dut.sl4f.audio_lib.stopOutputSave()
-            log_time = int(time.time())
-            save_path = "{}/{}".format(self.pri_dut.log_path,
-                                       "{}_audio.raw".format(log_time))
-            analysis_path = "{}/{}".format(
-                self.pri_dut.log_path,
-                "{}_audio_analysis.txt".format(log_time))
-            result = self.pri_dut.sl4f.audio_lib.getOutputAudio(save_path)
-
-            channels = 1
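-            # Assumption: the raw capture pulled from the DUT is 32-bit,
-            # 48 kHz, mono PCM, which is why the analysis parameters below do
-            # not match the 16-bit stereo source file from the docstring.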
-            try:
-                quality_analysis(filename=save_path,
-                                 output_file=analysis_path,
-                                 bit_width=audio_bits_per_sample_32,
-                                 rate=audio_sample_rate_48000,
-                                 channel=channels,
-                                 spectral_only=False)
-
-            except Exception as err:
-                self.log.error("Failed to analyze raw audio: {}".format(err))
-                return False
-
-            self.log.info("Analysis output here: {}".format(analysis_path))
-            self.log.info("Analysis Results: {}".format(
-                open(analysis_path).readlines()))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End Audio wrappers"""
-    """Begin HFP wrappers"""
-
-    def do_hfp_init(self, line):
-        """
-        Description: Init the HFP component initiate.
-
-        Usage:
-          Examples:
-            hfp_init
-        """
-        cmd = "Initialize HFP proxy"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.init()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_remove_service(self, line):
-        """
-        Description: Removes the HFP service in use.
-
-        Usage:
-          Examples:
-            hfp_remove_service
-        """
-        cmd = "Remove HFP service"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.removeService()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_list_peers(self, line):
-        """
-        Description: List all HFP Hands-Free peers connected to the DUT.
-
-        Input(s):
-
-        Usage:
-          Examples:
-            hfp_list_peers
-        """
-        cmd = "Lists connected peers"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.listPeers()
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_active_peer(self, line):
-        """
-        Description: Set the active HFP Hands-Free peer for the DUT.
-
-        Input(s):
-            peer_id: The id of the peer to be set active.
-
-        Usage:
-          Examples:
-            hfp_set_active_peer <peer_id>
-        """
-        cmd = "Set the active peer"
-        try:
-            peer_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setActivePeer(peer_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_list_calls(self, line):
-        """
-        Description: List all calls known to the sl4f component on the DUT.
-
-        Input(s):
-
-        Usage:
-          Examples:
-            hfp_list_calls
-        """
-        cmd = "Lists all calls"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.listCalls()
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_new_call(self, line):
-        """
-        Description: Simulate a call on the call manager
-
-        Input(s):
-            remote: The number of the remote party on the simulated call
-            state: The state of the call. Must be one of "ringing", "waiting",
-                   "dialing", "alerting", "active", "held".
-            direction: The direction of the call. Must be one of "incoming", "outgoing".
-
-        Usage:
-          Examples:
-            hfp_new_call <remote> <state> <direction>
-            hfp_new_call 14085555555 active incoming
-            hfp_new_call 14085555555 held outgoing
-            hfp_new_call 14085555555 ringing incoming
-            hfp_new_call 14085555555 waiting incoming
-            hfp_new_call 14085555555 alerting outgoing
-            hfp_new_call 14085555555 dialing outgoing
-        """
-        cmd = "Simulates a call"
-        try:
-            info = line.strip().split()
-            if len(info) != 3:
-                raise ValueError(
-                    "Exactly three command line arguments required: <remote> <state> <direction>"
-                )
-            remote, state, direction = info[0], info[1], info[2]
-            result = self.pri_dut.sl4f.hfp_lib.newCall(remote, state,
-                                                       direction)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_incoming_call(self, line):
-        """
-        Description: Simulate an incoming call on the call manager
-
-        Input(s):
-            remote: The number of the remote party on the incoming call
-
-        Usage:
-          Examples:
-            hfp_incoming_call <remote>
-            hfp_incoming_call 14085555555
-        """
-        cmd = "Simulates an incoming call"
-        try:
-            remote = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.initiateIncomingCall(remote)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_waiting_call(self, line):
-        """
-        Description: Simulate an incoming call on the call manager when there is
-        an ongoing active call already.
-
-        Input(s):
-            remote: The number of the remote party on the incoming call
-
-        Usage:
-          Examples:
-            hfp_waiting_call <remote>
-            hfp_waiting_call 14085555555
-        """
-        cmd = "Simulates an incoming call"
-        try:
-            remote = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.initiateIncomingWaitingCall(
-                remote)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_outgoing_call(self, line):
-        """
-        Description: Simulate an outgoing call on the call manager
-
-        Input(s):
-            remote: The number of the remote party on the outgoing call
-
-        Usage:
-          Examples:
-            hfp_outgoing_call <remote>
-        """
-        cmd = "Simulates an outgoing call"
-        try:
-            remote = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.initiateOutgoingCall(remote)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_active(self, line):
-        """
-        Description: Set the specified call to the "OngoingActive" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_set_call_active <call_id>
-        """
-        cmd = "Set the specified call to active"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallActive(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_held(self, line):
-        """
-        Description: Set the specified call to the "OngoingHeld" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_set_call_held <call_id>
-        """
-        cmd = "Set the specified call to held"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallHeld(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_terminated(self, line):
-        """
-        Description: Set the specified call to the "Terminated" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_set_call_terminated <call_id>
-        """
-        cmd = "Set the specified call to terminated"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallTerminated(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_transferred_to_ag(self, line):
-        """
-        Description: Set the specified call to the "TransferredToAg" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_set_call_transferred_to_ag <call_id>
-        """
-        cmd = "Set the specified call to TransferredToAg"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallTransferredToAg(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_speaker_gain(self, line):
-        """
-        Description: Set the active peer's speaker gain.
-
-        Input(s):
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_speaker_gain <value>
-        """
-        cmd = "Set the active peer's speaker gain"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setSpeakerGain(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_microphone_gain(self, line):
-        """
-        Description: Set the active peer's microphone gain.
-
-        Input(s):
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_microphone_gain <value>
-        """
-        cmd = "Set the active peer's microphone gain"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setMicrophoneGain(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_service_available(self, line):
-        """
-        Description: Sets the simulated network service status reported by the call manager.
-
-        Input(s):
-            value: "true" to set the network connection to available.
-
-        Usage:
-          Examples:
-            hfp_set_service_available <value>
-            hfp_set_service_available true
-            hfp_set_service_available false
-        """
-        cmd = "Sets the simulated network service status reported by the call manager"
-        try:
-            value = line.strip() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setServiceAvailable(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_roaming(self, line):
-        """
-        Description: Sets the simulated roaming status reported by the call manager.
-
-        Input(s):
-            value: "true" to set the network connection to roaming.
-
-        Usage:
-          Examples:
-            hfp_set_roaming <value>
-            hfp_set_roaming true
-            hfp_set_roaming false
-        """
-        cmd = "Sets the simulated roaming status reported by the call manager"
-        try:
-            value = line.strip() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setRoaming(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_signal_strength(self, line):
-        """
-        Description: Sets the simulated signal strength reported by the call manager.
-
-        Input(s):
-            value: The signal strength value to set. Must be between 0-5 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_signal_strength <value>
-            hfp_set_signal_strength 0
-            hfp_set_signal_strength 3
-            hfp_set_signal_strength 5
-        """
-        cmd = "Sets the simulated signal strength reported by the call manager"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setSignalStrength(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_subscriber_number(self, line):
-        """
-        Description: Sets the subscriber number reported by the call manager.
-
-        Input(s):
-            value: The subscriber number to set. Maximum length 128 characters.
-
-        Usage:
-          Examples:
-            hfp_set_subscriber_number <value>
-            hfp_set_subscriber_number 14085555555
-        """
-        cmd = "Sets the subscriber number reported by the call manager"
-        try:
-            value = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.setSubscriberNumber(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_operator(self, line):
-        """
-        Description: Sets the operator value reported by the call manager.
-
-        Input(s):
-            value: The operator value to set. Maximum length 16 characters.
-
-        Usage:
-          Examples:
-            hfp_set_operator <value>
-            hfp_set_operator GoogleFi
-        """
-        cmd = "Sets the operator value reported by the call manager"
-        try:
-            value = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.setOperator(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_nrec_support(self, line):
-        """
-        Description: Sets the noise reduction/echo cancellation support reported by the call manager.
-
-        Input(s):
-            value: The nrec support bool.
-
-        Usage:
-          Examples:
-            hfp_set_nrec_support <value>
-            hfp_set_nrec_support true
-            hfp_set_nrec_support false
-        """
-        cmd = "Sets the noise reduction/echo cancelation support reported by the call manager"
-        try:
-            value = line.strip() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setNrecSupport(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_battery_level(self, line):
-        """
-        Description: Sets the battery level reported by the call manager.
-
-        Input(s):
-            value: The integer battery level value. Must be 0-5 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_battery_level <value>
-            hfp_set_battery_level 0
-            hfp_set_battery_level 3
-        """
-        cmd = "Set the battery level reported by the call manager"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setBatteryLevel(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_last_dialed(self, line):
-        """
-        Description: Sets the last dialed number in the call manager.
-
-        Input(s):
-            number: The number of the remote party.
-
-        Usage:
-          Examples:
-            hfp_set_last_dialed <number>
-            hfp_set_last_dialed 14085555555
-        """
-        cmd = "Sets the last dialed number in the call manager."
-        try:
-            number = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.setLastDialed(number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_clear_last_dialed(self, line):
-        """
-        Description: Clears the last dialed number in the call manager.
-
-        Usage:
-          Examples:
-            hfp_clear_last_dialed
-        """
-        cmd = "Clears the last dialed number in the call manager."
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.clearLastDialed()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_memory_location(self, line):
-        """
-        Description: Sets a memory location to point to a remote number.
-
-        Input(s):
-            location: The memory location at which to store the number.
-            number: The number of the remote party to be stored.
-
-        Usage:
-          Examples:
-            hfp_set_memory_location <location> <number>
-            hfp_set_memory_location 0 14085555555
-        """
-        cmd = "Sets a memory location to point to a remote number."
-        try:
-            info = line.strip().split()
-            if len(info) != 2:
-                raise ValueError(
-                    "Exactly two command line arguments required: <location> <number>"
-                )
-            location, number = info[0], info[1]
-            result = self.pri_dut.sl4f.hfp_lib.setMemoryLocation(
-                location, number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_clear_memory_location(self, line):
-        """
-        Description: Clears the remote number stored at a memory location.
-
-        Input(s):
-            location: The memory location to clear.
-
-        Usage:
-          Examples:
-            hfp_clear_memory_location <location>
-            hfp_clear_memory_location 0
-        """
-        cmd = "Sets a memory location to point to a remote number."
-        try:
-            location = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.clearMemoryLocation(location)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_dial_result(self, line):
-        """
-        Description: Sets the status result to be returned when the number is dialed.
-
-        Input(s):
-            number: The number of the remote party.
-            status: The status to be returned when an outgoing call is initiated to the number.
-
-        Usage:
-          Examples:
-            hfp_set_dial_result <number> <status>
-        """
-        cmd = "Sets the status result to be returned when the number is dialed."
-        try:
-            info = line.strip().split()
-            if len(info) != 2:
-                raise ValueError(
-                    "Exactly two command line arguments required: <number> <status>"
-                )
-            number, status = info[0], int(info[1])
-            result = self.pri_dut.sl4f.hfp_lib.setDialResult(number, status)
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_get_state(self, line):
-        """
-        Description: Get the call manager's complete state
-
-        Usage:
-          Examples:
-            hfp_get_state
-        """
-        cmd = "Get the call manager's state"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.getState()
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_connection_behavior(self, line):
-        """
-        Description: Set the Service Level Connection (SLC) behavior when a new peer connects.
-
-        Input(s):
-            autoconnect: Enable/Disable autoconnection of SLC.
-
-        Usage:
-          Examples:
-            hfp_set_connection_behavior <autoconnect>
-            hfp_set_connection_behavior true
-            hfp_set_connection_behavior false
-        """
-        cmd = "Set the Service Level Connection (SLC) behavior"
-        try:
-            autoconnect = line.strip().lower() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setConnectionBehavior(
-                autoconnect)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End HFP wrappers"""
-    """Begin RFCOMM wrappers"""
-
-    def do_rfcomm_init(self, line):
-        """
-        Description: Initialize the RFCOMM component services.
-
-        Usage:
-          Examples:
-            rfcomm_init
-        """
-        cmd = "Initialize RFCOMM proxy"
-        try:
-            result = self.pri_dut.sl4f.rfcomm_lib.init()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_remove_service(self, line):
-        """
-        Description: Removes the RFCOMM service in use.
-
-        Usage:
-          Examples:
-            rfcomm_remove_service
-        """
-        cmd = "Remove RFCOMM service"
-        try:
-            result = self.pri_dut.sl4f.rfcomm_lib.removeService()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_disconnect_session(self, line):
-        """
-        Description: Closes the RFCOMM Session.
-
-        Usage:
-          Examples:
-            rfcomm_disconnect_session
-        """
-        cmd = "Disconnect the RFCOMM Session"
-        try:
-            result = self.pri_dut.sl4f.rfcomm_lib.disconnectSession(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_connect_rfcomm_channel(self, line):
-        """
-        Description: Make an outgoing RFCOMM connection.
-
-        Usage:
-          Examples:
-            rfcomm_connect_rfcomm_channel <server_channel_number>
-            rfcomm_connect_rfcomm_channel 2
-        """
-        cmd = "Make an outgoing RFCOMM connection"
-        try:
-            server_channel_number = int(line.strip())
-            result = self.pri_dut.sl4f.rfcomm_lib.connectRfcommChannel(
-                self.unique_mac_addr_id, server_channel_number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_disconnect_rfcomm_channel(self, line):
-        """
-        Description: Close the RFCOMM connection with the peer
-
-        Usage:
-          Examples:
-            rfcomm_disconnect_rfcomm_channel <server_channel_number>
-            rfcomm_disconnect_rfcomm_channel 2
-        """
-        cmd = "Close the RFCOMM channel"
-        try:
-            server_channel_number = int(line.strip())
-            result = self.pri_dut.sl4f.rfcomm_lib.disconnectRfcommChannel(
-                self.unique_mac_addr_id, server_channel_number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_send_remote_line_status(self, line):
-        """
-        Description: Send a remote line status for the RFCOMM channel.
-
-        Usage:
-          Examples:
-            rfcomm_send_remote_line_status <server_channel_number>
-            rfcomm_send_remote_line_status 2
-        """
-        cmd = "Send a remote line status update for the RFCOMM channel"
-        try:
-            server_channel_number = int(line.strip())
-            result = self.pri_dut.sl4f.rfcomm_lib.sendRemoteLineStatus(
-                self.unique_mac_addr_id, server_channel_number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_write_rfcomm(self, line):
-        """
-        Description: Send data over the RFCOMM channel.
-
-        Usage:
-          Examples:
-            rfcomm_write_rfcomm <server_channel_number> <data>
-            rfcomm_write_rfcomm 2 foobar
-        """
-        cmd = "Send data using the RFCOMM channel"
-        try:
-            info = line.strip().split()
-            if len(info) != 2:
-                raise ValueError(
-                    "Exactly two command line arguments required: <server_channel_number> <data>"
-                )
-            server_channel_number = int(info[0])
-            data = info[1]
-            result = self.pri_dut.sl4f.rfcomm_lib.writeRfcomm(
-                self.unique_mac_addr_id, server_channel_number, data)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End RFCOMM wrappers"""
diff --git a/src/antlion/tests/bt/ep/BtFuchsiaEPTest.py b/src/antlion/tests/bt/ep/BtFuchsiaEPTest.py
deleted file mode 100644
index 626c259..0000000
--- a/src/antlion/tests/bt/ep/BtFuchsiaEPTest.py
+++ /dev/null
@@ -1,310 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Setup:
-This test requires only two Fuchsia devices.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-from antlion.test_utils.fuchsia.bt_test_utils import bredr_scan_for_device_by_name
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-from antlion.test_utils.fuchsia.bt_test_utils import unbond_all_known_devices
-from antlion.test_utils.fuchsia.bt_test_utils import verify_device_state_by_name
-import time
-
-
-class BtFuchsiaEPTest(BaseTestClass):
-    ble_advertise_interval = 50
-    scan_timeout_seconds = 60
-    default_iterations = 1000
-    adv_name = generate_id_by_size(10)
-    test_adv_data = {
-        "name": adv_name,
-        "appearance": None,
-        "service_data": None,
-        "tx_power_level": None,
-        "service_uuids": None,
-        "manufacturer_data": None,
-        "uris": None,
-    }
-    test_connectable = True
-    test_scan_response = None
-
-    def setup_class(self):
-        super().setup_class()
-        for fd in self.fuchsia_devices:
-            fd.sl4f.bts_lib.initBluetoothSys()
-        self.pri_dut = self.fuchsia_devices[0]
-        self.sec_dut = self.fuchsia_devices[1]
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            fd.take_bug_report(test_name, begin_time)
-        self._unbond_all_known_devices()
-        self.sec_dut.sl4f.ble_lib.bleStopBleAdvertising()
-        self._kill_media_services()
-
-    def teardown_class(self):
-        self._kill_media_services()
-
-    def _kill_media_services(self):
-        """Kill any BT services related to A2DP/AVRCP on all Fuchsia devices.
-        """
-        ssh_timeout = 30
-        for fd in self.fuchsia_devices:
-            try:
-                fd.ssh.run("killall bt-a2dp*", timeout_sec=ssh_timeout)
-                fd.ssh.run("killall bt-avrcp*", timeout_sec=ssh_timeout)
-            except FuchsiaSSHError:
-                pass
-
-    def _unbond_all_known_devices(self):
-        """For all Fuchsia devices, unbond any known pairings.
-        """
-        time.sleep(5)
-        for fd in self.fuchsia_devices:
-            unbond_all_known_devices(fd, self.log)
-
-    def test_ble_awareness(self):
-        """Verify that Fuchsia devices can advertise and scan each other
-
-        Verify a Fuchsia device that starts a BLE advertisement can be
-        found by a Fuchsia BLE scanner.
-
-        Steps:
-        1. On one Fuchsia device set an advertisement
-        2. On one Fuchsia device, scan for the advertisement by name
-
-        Expected Result:
-        Verify that there are no errors after each GATT connection.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BLE
-        Priority: 0
-        """
-
-        self.sec_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            self.test_adv_data, self.test_scan_response,
-            self.ble_advertise_interval, self.test_connectable)
-
-        device = le_scan_for_device_by_name(self.pri_dut, self.log,
-                                            self.adv_name,
-                                            self.scan_timeout_seconds)
-        self.sec_dut.sl4f.ble_lib.bleStopBleAdvertising()
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        raise signals.TestPass("Success")
-
-    def test_gatt_central_peripheral(self):
-        """Verify that Fuchsia devices can perform GATT operations
-
-        Verify a Fuchsia devices can perform GATT connections and interactions.
-
-        Steps:
-        1. On one Fuchsia device set an advertisement
-        2. On one Fuchsia device, scan for the advertisement by name
-        3. Perform GATT connection over LE
-        4. Pair both devices.
-        5. Perform GATT read/write operations.
-        6. Perform GATT disconnection.
-
-        Expected Result:
-        Verify that there are no errors after each GATT connection.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BLE
-        Priority: 0
-        """
-        self._unbond_all_known_devices()
-
-        source_device_name = generate_id_by_size(10)
-        self.pri_dut.sl4f.bts_lib.setName(source_device_name)
-
-        self.sec_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            self.test_adv_data, self.test_scan_response,
-            self.ble_advertise_interval, self.test_connectable)
-
-        device = le_scan_for_device_by_name(self.pri_dut, self.log,
-                                            self.adv_name,
-                                            self.scan_timeout_seconds)
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-
-        connect_result = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-            device["id"])
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure("GATT Connection failed with: {}".format(
-                connect_result.get("error")))
-
-        if not verify_device_state_by_name(self.pri_dut, self.log,
-                                           self.adv_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(
-                self.sec_dut, self.log, source_device_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(source_device_name))
-
-        security_level = "ENCRYPTED"
-        non_bondable = False
-        transport = 2  #LE
-        self.pri_dut.sl4f.bts_lib.pair(device["id"], security_level,
-                                       non_bondable, transport)
-
-        services = None
-        if not verify_device_state_by_name(self.pri_dut, self.log,
-                                           self.adv_name, "BONDED", services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(self.sec_dut, self.log,
-                                           source_device_name, "BONDED",
-                                           services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(source_device_name))
-
-        disconnect_result = self.pri_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-            device["id"])
-        if disconnect_result.get("error") is not None:
-            raise signals.TestFailure(
-                "GATT Disconnection failed with: {}".format(
-                    connect_result.get("error")))
-
-        self.sec_dut.sl4f.ble_lib.bleStopBleAdvertising()
-
-        # TODO: Setup Proper GATT server and verify services published are found
-
-        raise signals.TestPass("Success")
-
-    def test_pairing_a2dp(self):
-        """Verify that Fuchsia devices can pair to each other and establish
-            an A2DP connection
-
-            Verify that Fuchsia devices can pair to each other and establish
-            an A2DP connection
-
-            Steps:
-            1. Clear out all bonded devices
-            2. Stop any A2DP services running on the device
-                Needed to take ownership of the services
-            3. Init sink and source opposite devices
-            4. Start pairing delegate for all Fuchsia devices
-            5. Set sink device to be discoverable
-            6. Discover sink device from source device
-            7. Connect to sink device from source device
-            8. Pair to sink device
-            9. Validate paired devices and services present
-
-            Expected Result:
-            Verify devices are successfully paired and appropriate a2dp
-            services are running.
-
-            Returns:
-            signals.TestPass if no errors
-            signals.TestFailure if there are any errors during the test.
-
-            TAGS: BREDR, A2DP
-            Priority: 0
-        """
-        self._unbond_all_known_devices()
-        self._kill_media_services()
-
-        source_device_name = generate_id_by_size(10)
-        target_device_name = generate_id_by_size(10)
-
-        self.pri_dut.sl4f.bts_lib.setName(source_device_name)
-        self.sec_dut.sl4f.bts_lib.setName(target_device_name)
-
-        input_capabilities = "NONE"
-        output_capabilities = "NONE"
-
-        # Initialize A2DP on both devices.
-        self.pri_dut.sl4f.avdtp_lib.init()
-        self.sec_dut.sl4f.avdtp_lib.init()
-
-        self.pri_dut.sl4f.bts_lib.acceptPairing(input_capabilities,
-                                                output_capabilities)
-
-        self.sec_dut.sl4f.bts_lib.acceptPairing(input_capabilities,
-                                                output_capabilities)
-        self.sec_dut.sl4f.bts_lib.setDiscoverable(True)
-
-        unique_mac_addr_id = bredr_scan_for_device_by_name(
-            self.pri_dut, self.log, target_device_name,
-            self.scan_timeout_seconds)
-
-        if not unique_mac_addr_id:
-            raise signals.TestFailure(
-                "Failed to find device {}.".format(target_device_name))
-
-        connect_result = self.pri_dut.sl4f.bts_lib.connectDevice(
-            unique_mac_addr_id)
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure("Failed to connect with {}.".format(
-                connect_result.get("error")))
-
-        # We pair before checking the CONNECTED status because BR/EDR semantics
-        # were recently changed such that if pairing is not confirmed, then bt
-        # does not report connected = True.
-        security_level = "NONE"
-        bondable = True
-        transport = 1  #BREDR
-        pair_result = self.pri_dut.sl4f.bts_lib.pair(unique_mac_addr_id,
-                                                     security_level, bondable,
-                                                     transport)
-        if pair_result.get("error") is not None:
-            raise signals.TestFailure("Failed to pair with {}.".format(
-                pair_result.get("error")))
-
-        if not verify_device_state_by_name(
-                self.pri_dut, self.log, target_device_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(
-                self.sec_dut, self.log, source_device_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(source_device_name))
-
-        #TODO: Validation of services and paired devices (b/175641870)
-        # A2DP sink: 0000110b-0000-1000-8000-00805f9b34fb
-        # A2DP source: 0000110a-0000-1000-8000-00805f9b34fb
-        #TODO: Make an easy function for checking/updating devices
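-        # A sketch of what such a helper might look like (illustrative only,
-        # not part of this change; it simply wraps the checks made below):
-        #
-        #   def _assert_state(dut, name, state):
-        #       if not verify_device_state_by_name(dut, self.log, name,
-        #                                          state, None):
-        #           raise signals.TestFailure(
-        #               "Device {} not in state {}.".format(name, state))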
-        services = None
-        if not verify_device_state_by_name(self.pri_dut, self.log,
-                                           target_device_name, "BONDED",
-                                           services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(self.sec_dut, self.log,
-                                           source_device_name, "BONDED",
-                                           services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(source_device_name))
-
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/bt/gatt/GattConnectionStressTest.py b/src/antlion/tests/bt/gatt/GattConnectionStressTest.py
deleted file mode 100644
index 42e2f92..0000000
--- a/src/antlion/tests/bt/gatt/GattConnectionStressTest.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is a stress test for Fuchsia GATT connections.
-
-Custom Params:
-gatt_connect_stress_test_iterations
-
-    Example:
-    "gatt_connect_stress_test_iterations": 10
-
-Setup:
-This test only requires two Fuchsia devices, as the purpose is to test
-the robustness of GATT connections.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-
-class GattConnectionStressTest(BaseTestClass):
-    gatt_connect_err_message = "Gatt connection failed with: {}"
-    gatt_disconnect_err_message = "Gatt disconnection failed with: {}"
-    ble_advertise_interval = 50
-    scan_timeout_seconds = 60
-    default_iterations = 1000
-
-    def setup_class(self):
-        super().setup_class()
-        self.fuchsia_client_dut = self.fuchsia_devices[0]
-        self.fuchsia_server_dut = self.fuchsia_devices[1]
-        self.default_iterations = self.user_params.get(
-            "gatt_connect_stress_test_iterations", self.default_iterations)
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            fd.take_bug_report(test_name, begin_time)
-
-    def _orchestrate_single_connect_disconnect(self):
-        adv_name = generate_id_by_size(10)
-        adv_data = {
-            "name": adv_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        self.fuchsia_server_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, self.ble_advertise_interval, connectable)
-        device = le_scan_for_device_by_name(self.fuchsia_client_dut, self.log,
-                                            adv_name,
-                                            self.scan_timeout_seconds)
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        connect_result = self.fuchsia_client_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-            device["id"])
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_connect_err_message.format(
-                    connect_result.get("error")))
-        self.log.info("Connection Successful...")
-        disconnect_result = self.fuchsia_client_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-            device["id"])
-        if disconnect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_disconnect_err_message.format(
-                    connect_result.get("error")))
-        self.log.info("Disconnection Successful...")
-        self.fuchsia_server_dut.sl4f.ble_lib.bleStopBleAdvertising()
-
-    def test_connect_reconnect_n_iterations_over_le(self):
-        """Test GATT reconnection n times.
-
-        Verify that the GATT client device can discover and connect to
-        a peripheral n times. The default value is 1000.
-
-        Steps:
-        1. Set up a BLE advertisement on the peripheral with a unique
-            advertisement name.
-        2. GATT client scans for the peripheral advertisement.
-        3. Upon finding the advertisement, send a connection request to
-            the peripheral.
-
-        Expected Result:
-        Verify that there are no errors after each GATT connection.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: GATT
-        Priority: 1
-        """
-        for i in range(self.default_iterations):
-            self.log.info("Starting iteration {}".format(i + 1))
-            self._orchestrate_single_connect_disconnect()
-            self.log.info("Iteration {} successful".format(i + 1))
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/bt/gatt/GattServerSetupTest.py b/src/antlion/tests/bt/gatt/GattServerSetupTest.py
deleted file mode 100644
index 035374a..0000000
--- a/src/antlion/tests/bt/gatt/GattServerSetupTest.py
+++ /dev/null
@@ -1,890 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This test exercises basic setup of various GATT server configurations.
-
-Setup:
-This test only requires one Fuchsia device, as the purpose is to test
-different configurations of valid GATT services.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-
-import gatt_server_databases as database
-
-
-class GattServerSetupTest(BaseTestClass):
-    err_message = "Setting up database failed with: {}"
-
-    def setup_class(self):
-        super().setup_class()
-        self.fuchsia_dut = self.fuchsia_devices[0]
-
-    def setup_database(self, database):
-        setup_result = self.fuchsia_dut.sl4f.gatts_lib.publishServer(database)
-        if setup_result.get("error") is None:
-            raise signals.TestPass(setup_result.get("result"))
-        else:
-            raise signals.TestFailure(
-                self.err_message.format(setup_result.get("error")))
-
-    def test_teardown(self):
-        self.fuchsia_dut.sl4f.gatts_lib.closeServer()
-
-    def test_single_primary_service(self):
-        """Test GATT Server Setup: Single Primary Service
-
-        Test a single primary service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.SINGLE_PRIMARY_SERVICE)
-
-    def test_single_secondary_service(self):
-        """Test GATT Server Setup: Single Secondary Service
-
-        Test a single secondary service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.SINGLE_SECONDARY_SERVICE)
-
-    def test_primary_and_secondary_service(self):
-        """Test GATT Server Setup: Primary and secondary service
-
-        Test primary and secondary service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.PRIMARY_AND_SECONDARY_SERVICES)
-
-    def test_duplicate_services(self):
-        """Test GATT Server Setup: Duplicate service uuids
-
-        Test duplicate service uuids as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.DUPLICATE_SERVICES)
-
-    ### Begin SIG defined services ###
-
-    def test_alert_notification_service(self):
-        """Test GATT Server Setup: Alert Notification Service
-
-        Test Alert Notification Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.ALERT_NOTIFICATION_SERVICE)
-
-    def test_automation_io_service(self):
-        """Test GATT Server Setup: Automation IO
-
-        Test Automation IO as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.AUTOMATION_IO_SERVICE)
-
-    def test_battery_service(self):
-        """Test GATT Server Setup: Battery Service
-
-        Test Battery Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BATTERY_SERVICE)
-
-    def test_blood_pressure_service(self):
-        """Test GATT Server Setup: Blood Pressure
-
-        Test Blood Pressure as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BLOOD_PRESSURE_SERVICE)
-
-    def test_body_composition_service(self):
-        """Test GATT Server Setup: Body Composition
-
-        Test Body Composition as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BODY_COMPOSITION_SERVICE)
-
-    def test_bond_management_service(self):
-        """Test GATT Server Setup: Bond Management Service
-
-        Test Bond Management Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BOND_MANAGEMENT_SERVICE)
-
-    def test_continuous_glucose_monitoring_service(self):
-        """Test GATT Server Setup: Continuous Glucose Monitoring
-
-        Test Continuous Glucose Monitoring as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CONTINUOUS_GLUCOSE_MONITORING_SERVICE)
-
-    def test_current_time_service(self):
-        """Test GATT Server Setup: Current Time Service
-
-        Test Current Time Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CURRENT_TIME_SERVICE)
-
-    def test_cycling_power_service(self):
-        """Test GATT Server Setup: Cycling Power
-
-        Test Cycling Power as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CYCLING_POWER_SERVICE)
-
-    def test_cycling_speed_and_cadence_service(self):
-        """Test GATT Server Setup: Cycling Speed and Cadence
-
-        Test Cycling Speed and Cadence as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CYCLING_SPEED_AND_CADENCE_SERVICE)
-
-    def test_device_information_service(self):
-        """Test GATT Server Setup: Device Information
-
-        Test Device Information as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.DEVICE_INFORMATION_SERVICE)
-
-    def test_environmental_sensing_service(self):
-        """Test GATT Server Setup: Environmental Sensing
-
-        Test Environmental Sensing as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.ENVIRONMENTAL_SENSING_SERVICE)
-
-    def test_fitness_machine_service(self):
-        """Test GATT Server Setup: Fitness Machine
-
-        Test Fitness Machine as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.FITNESS_MACHINE_SERVICE)
-
-    def test_glucose_service(self):
-        """Test GATT Server Setup: Glucose
-
-        Test Glucose as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.GLUCOSE_SERVICE)
-
-    def test_health_thermometer_service(self):
-        """Test GATT Server Setup: Health Thermometer
-
-        Test Health Thermometer as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HEALTH_THERMOMETER_SERVICE)
-
-    def test_heart_rate_service(self):
-        """Test GATT Server Setup: Heart Rate
-
-        Test Heart Rate as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HEART_RATE_SERVICE)
-
-    def test_http_proxy_service(self):
-        """Test GATT Server Setup: HTTP Proxy
-
-        Test HTTP Proxy as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HTTP_PROXY_SERVICE)
-
-    def test_human_interface_device_service(self):
-        """Test GATT Server Setup: Human Interface Device
-
-        Test Human Interface Device as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HUMAN_INTERFACE_DEVICE_SERVICE)
-
-    def test_immediate_alert_service(self):
-        """Test GATT Server Setup: Immediate Alert
-
-        Test Immediate Alert as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.IMMEDIATE_ALERT_SERVICE)
-
-    def test_indoor_positioning_service(self):
-        """Test GATT Server Setup: Indoor Positioning
-
-        Test Indoor Positioning as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.INDOOR_POSITIONING_SERVICE)
-
-    def test_insulin_delivery_service(self):
-        """Test GATT Server Setup: Insulin Delivery
-
-        Test Insulin Delivery as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.INSULIN_DELIVERY_SERVICE)
-
-    def test_internet_protocol_support_service(self):
-        """Test GATT Server Setup: Internet Protocol Support Service
-
-        Test Internet Protocol Support Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.INTERNET_PROTOCOL_SUPPORT_SERVICE)
-
-    def test_link_loss_service(self):
-        """Test GATT Server Setup: Link Loss
-
-        Test Link Loss as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.LINK_LOSS_SERVICE)
-
-    def test_location_and_navigation_service(self):
-        """Test GATT Server Setup: Location and Navigation
-
-        Test Location and Navigation as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.LOCATION_AND_NAVIGATION_SERVICE)
-
-    def test_mesh_provisioning_service(self):
-        """Test GATT Server Setup: Mesh Provisioning Service
-
-        Test Mesh Provisioning Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.MESH_PROVISIONING_SERVICE)
-
-    def test_mesh_proxy_service(self):
-        """Test GATT Server Setup: Mesh Proxy Service
-
-        Test Mesh Proxy Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.MESH_PROXY_SERVICE)
-
-    def test_next_dst_change_service(self):
-        """Test GATT Server Setup: Next DST Change Service
-
-        Test Next DST Change Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.NEXT_DST_CHANGE_SERVICE)
-
-    def test_object_transfer_service(self):
-        """Test GATT Server Setup: Object Transfer Service
-
-        Test Object Transfer Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.OBJECT_TRANSFER_SERVICE)
-
-    def test_phone_alert_status_service(self):
-        """Test GATT Server Setup: Phone Alert Status Service
-
-        Test Phone Alert Status Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.PHONE_ALERT_STATUS_SERVICE)
-
-    def test_pulse_oximeter_service(self):
-        """Test GATT Server Setup: Pulse Oximeter Service
-
-        Test Pulse Oximeter Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.PULSE_OXIMETER_SERVICE)
-
-    def test_reconnection_configuration_service(self):
-        """Test GATT Server Setup: Reconnection Configuration
-
-        Test Reconnection Configuration as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.RECONNECTION_CONFIGURATION_SERVICE)
-
-    def test_reference_time_update_service(self):
-        """Test GATT Server Setup: Reference Time Update Service
-
-        Test Reference Time Update Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.REFERENCE_TIME_UPDATE_SERVICE)
-
-    def test_running_speed_and_cadence_service(self):
-        """Test GATT Server Setup: Running Speed and Cadence
-
-        Test Running Speed and Cadence as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.RUNNING_SPEED_AND_CADENCE_SERVICE)
-
-    def test_scan_parameters_service(self):
-        """Test GATT Server Setup: Scan Parameters
-
-        Test Scan Parameters as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.SCAN_PARAMETERS_SERVICE)
-
-    def test_transport_discovery_service(self):
-        """Test GATT Server Setup: Transport Discovery
-
-        Test Transport Discovery as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.TRANSPORT_DISCOVERY_SERVICE)
-
-    def test_tx_power_service(self):
-        """Test GATT Server Setup: Tx Power
-
-        Test Tx Power as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.TX_POWER_SERVICE)
-
-    def test_user_data_service(self):
-        """Test GATT Server Setup: User Data
-
-        Test User Data as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.USER_DATA_SERVICE)
-
-    def test_weight_scale_service(self):
-        """Test GATT Server Setup: Weight Scale
-
-        Test Weight Scale as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.WEIGHT_SCALE_SERVICE)
-
-    ### End SIG defined services ###
diff --git a/src/antlion/tests/bt/gatt/gatt_server_databases.py b/src/antlion/tests/bt/gatt/gatt_server_databases.py
deleted file mode 100644
index ecdf40d..0000000
--- a/src/antlion/tests/bt/gatt/gatt_server_databases.py
+++ /dev/null
@@ -1,2491 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-GATT server dictionaries which will be set up in various tests.
-"""
-
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_char_types
-from antlion.test_utils.bt.bt_constants import gatt_characteristic_value_format
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-
-SINGLE_PRIMARY_SERVICE = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }]
-}
-
-SINGLE_SECONDARY_SERVICE = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['secondary'],
-    }]
-}
-
-PRIMARY_AND_SECONDARY_SERVICES = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }, {
-        'uuid': '00001803-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['secondary'],
-    }]
-}
-
-DUPLICATE_SERVICES = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }, {
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }]
-}
-
-### Begin SIG defined services ###
-# yapf: disable
-
-# TODO: Reconcile all the proper security parameters of each service.
-# Some are correct, others are not.
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.alert_notification.xml
-ALERT_NOTIFICATION_SERVICE = {
-    'services': [{
-        'uuid': '00001811-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a47-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a46-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a48-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a45-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a44-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.automation_io.xml
-AUTOMATION_IO_SERVICE = {
-    'services': [{
-        'uuid': '00001815-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a56-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002904-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_ext_props']
-            }, {
-                'uuid': '0000290a-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290e-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '00002909-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }, {
-            'uuid': '00002a58-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'] |
-            gatt_characteristic['write_type_signed'] |
-            gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002904-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_ext_props']
-            }, {
-                'uuid': '0000290a-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290e-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '00002909-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }, {
-                'uuid': '00002906-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }, {
-            'uuid': '00002a5a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.battery_service.xml
-BATTERY_SERVICE = {
-    'services': [{
-        'uuid': '0000180f-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a19-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002904-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.blood_pressure.xml
-BLOOD_PRESSURE_SERVICE = {
-    'services': [{
-        'uuid': '00001810-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a35-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a36-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }],
-        }, {
-            'uuid': '00002a49-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.body_composition.xml
-BODY_COMPOSITION_SERVICE = {
-    'services': [{
-        'uuid': '0000181b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a9b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a9c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.bond_management.xml
-BOND_MANAGEMENT_SERVICE = {
-    'services': [{
-        'uuid': '0000181e-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002aac-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002aa4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.continuous_glucose_monitoring.xml
-CONTINUOUS_GLUCOSE_MONITORING_SERVICE = {
-    'services': [{
-        'uuid': '0000181f-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002aa7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aa7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002aa8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002aa9-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002aaa-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aab-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a52-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aac-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.current_time.xml
-CURRENT_TIME_SERVICE = {
-    'services': [{
-        'uuid': '00001805-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a2b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a0f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }],
-        }, {
-            'uuid': '00002a14-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.cycling_power.xml
-CYCLING_POWER_SERVICE = {
-    'services': [{
-        'uuid': '00001818-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a63-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }, {
-                'uuid': gatt_char_desc_uuids['server_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a65-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a5d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a64-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a66-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.cycling_speed_and_cadence.xml
-CYCLING_SPEED_AND_CADENCE_SERVICE = {
-    'services': [{
-        'uuid': '00001816-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a5b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a5c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a5d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a55-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.device_information.xml
-DEVICE_INFORMATION_SERVICE = {
-    'services': [{
-        'uuid': '0000180a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a24-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a25-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a27-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a26-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a28-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a23-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a2a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a50-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.environmental_sensing.xml
-ENVIRONMENTAL_SENSING_SERVICE = {
-    'services': [{
-        'uuid': '0000181a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a7d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_extended_props'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a73-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        }, {
-            'uuid': '00002a72-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        }, {
-            'uuid': '00002a7b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a74-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a7a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a77-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a75-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a78-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a71-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a76-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a79-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002aa3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a2c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002aa0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002aa1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.fitness_machine.xml
-FITNESS_MACHINE_SERVICE = {
-    'services': [{
-        'uuid': '00001826-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002acc-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002acd-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ace-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002acf-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad5-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad6-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002ad8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad9-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ada-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-
-
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.glucose.xml
-GLUCOSE_SERVICE = {
-    'services': [{
-        'uuid': '00001808-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a18-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a34-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a51-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a52-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.health_thermometer.xml
-HEALTH_THERMOMETER_SERVICE = {
-    'services': [{
-        'uuid': '00001809-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a1c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a1d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a1e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a21-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.heart_rate.xml
-HEART_RATE_SERVICE = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a37-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a38-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-        }, {
-            'uuid': '00002a39-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.http_proxy.xml
-HTTP_PROXY_SERVICE = {
-    'services': [{
-        'uuid': '00001823-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002ab6-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002ab7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-        }, {
-            'uuid': '00002ab9-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 8,
-        }, {
-            'uuid': '00002aba-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 8,
-        }, {
-            'uuid': '00002ab8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 2,
-        }, {
-            'uuid': '00002abb-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.human_interface_device.xml
-HUMAN_INTERFACE_DEVICE_SERVICE = {
-    'services': [{
-        'uuid': '00001812-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a4e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a4d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002908-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }, {
-            'uuid': '00002a4b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': '00002907-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }, {
-            'uuid': '00002a22-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a32-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'] |
-            gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a33-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a4a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a4c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.immediate_alert.xml
-IMMEDIATE_ALERT_SERVICE = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a06-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.indoor_positioning.xml
-INDOOR_POSITIONING_SERVICE = {
-    'services': [{
-        'uuid': '00001821-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a06-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a38-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aad-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aae-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aaf-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab5-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.insulin_delivery.xml
-INSULIN_DELIVERY_SERVICE = {
-    'services': [{
-        'uuid': '0000183a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002b20-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b21-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b22-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b23-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1
-        }, {
-            'uuid': '00002b24-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b25-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b26-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b27-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b28-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.internet_protocol_support.xml
-INTERNET_PROTOCOL_SUPPORT_SERVICE = {
-    'services': [{
-        'uuid': '00001820-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.link_loss.xml
-LINK_LOSS_SERVICE = {
-    'services': [{
-        'uuid': '00001803-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a06-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.location_and_navigation.xml
-LOCATION_AND_NAVIGATION_SERVICE = {
-    'services': [{
-        'uuid': '00001819-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a6a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a67-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a69-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a6b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a68-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.mesh_provisioning.xml
-MESH_PROVISIONING_SERVICE = {
-    'services': [{
-        'uuid': '00001827-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002adb-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002adc-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.mesh_proxy.xml
-MESH_PROXY_SERVICE = {
-    'services': [{
-        'uuid': '00001828-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002add-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002ade-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.next_dst_change.xml
-NEXT_DST_CHANGE_SERVICE = {
-    'services': [{
-        'uuid': '00001807-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a11-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint32'],
-            'value': 1549903904,
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.object_transfer.xml
-OBJECT_TRANSFER_SERVICE = {
-    'services': [{
-        'uuid': '00001825-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002abd-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002abe-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002abf-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac5-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac6-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'] |
-            gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.phone_alert_status.xml
-PHONE_ALERT_STATUS_SERVICE = {
-    'services': [{
-        'uuid': '0000180e-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a3f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a41-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a40-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.pulse_oximeter.xml
-PULSE_OXIMETER_SERVICE = {
-    'services': [{
-        'uuid': '00001822-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a5e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a5f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a60-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a52-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.reconnection_configuration.xml
-RECONNECTION_CONFIGURATION_SERVICE = {
-    'services': [{
-        'uuid': '00001829-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002b1d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002b1e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b1f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.reference_time_update.xml
-REFERENCE_TIME_UPDATE_SERVICE = {
-    'services': [{
-        'uuid': '00001806-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a16-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a17-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.running_speed_and_cadence.xml
-RUNNING_SPEED_AND_CADENCE_SERVICE = {
-    'services': [{
-        'uuid': '00001814-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a53-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a54-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a5d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a55-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.scan_parameters.xml
-SCAN_PARAMETERS_SERVICE = {
-    'services': [{
-        'uuid': '00001813-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a4f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a31-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.transport_discovery.xml
-TRANSPORT_DISCOVERY_SERVICE = {
-    'services': [{
-        'uuid': '00001824-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002abc-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.tx_power.xml
-TX_POWER_SERVICE = {
-    'services': [{
-        'uuid': '00001804-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a07-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['uint8'],
-            'value': -24,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.user_data.xml
-USER_DATA_SERVICE = {
-    'services': [{
-        'uuid': '0000181c-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a8a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a90-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a87-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a80-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a85-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a98-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a96-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a92-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a91-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a7f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a83-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a93-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a86-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a97-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a88-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a89-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a7e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a84-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a81-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a82-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a94-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a95-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a99-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a9a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a9f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aa2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.weight_scale.xml
-WEIGHT_SCALE_SERVICE = {
-    'services': [{
-        'uuid': '0000181d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a9e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1
-        }, {
-            'uuid': '00002a9d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 100,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-
-# yapf: enable
-### End SIG defined services ###
diff --git a/src/antlion/tests/bt/pts/GATT_PTS_INSTRUCTIONS b/src/antlion/tests/bt/pts/GATT_PTS_INSTRUCTIONS
deleted file mode 100644
index fcfccf4..0000000
--- a/src/antlion/tests/bt/pts/GATT_PTS_INSTRUCTIONS
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-GATT
-=================================================================
-Note: Bug in PTS forces GATT operations to be over BR/EDR. To run tests over LE disable BR/EDR in ICS when running tests (ICS Name TSCP_GATT_2_1). To Run over BR/EDR re-enable the same ICS value.
-
-Note: While using ACTS cmd line tools, if there is ever an issue with connecting to PTS make sure the
-unique ID is properly set by running these commands:
-     tool_set_target_device_name PTS
-     tool_refresh_unique_id
-
-Cmd Line Tools in use:
-    ACTS:
-        FuchsiaCmdLineTest
-    Fuchsia CLI:
-        ...
-
-GATT/CL/GAC/BV-01-C
-    TBD
-
-GATT/CL/GAD/BV-01-C
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-02-C
-    Bug: BT-764
-
-GATT/CL/GAD/BV-03-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-04-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-05-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-06-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-07-C
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-
-GATT/CL/GAD/BV-08-C
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-
-GATT/CL/GAR/BV-01-C
-    Note: Bug BT-451 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_read_all_chars
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
diff --git a/src/antlion/tests/dhcp/BUILD.gn b/src/antlion/tests/dhcp/BUILD.gn
new file mode 100644
index 0000000..c3acdd3
--- /dev/null
+++ b/src/antlion/tests/dhcp/BUILD.gn
@@ -0,0 +1,38 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("dhcpv4_duplicate_address_test") {
+  main_source = "Dhcpv4DuplicateAddressTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("dhcpv4_interop_basic_test") {
+  main_source = "Dhcpv4InteropBasicTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("dhcpv4_interop_combinatorial_options_test") {
+  main_source = "Dhcpv4InteropCombinatorialOptionsTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("dhcpv4_interop_fixture_test") {
+  main_source = "Dhcpv4InteropFixtureTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":dhcpv4_duplicate_address_test($host_toolchain)",
+    ":dhcpv4_interop_basic_test($host_toolchain)",
+    ":dhcpv4_interop_combinatorial_options_test($host_toolchain)",
+    ":dhcpv4_interop_fixture_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py b/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py
new file mode 100644
index 0000000..4614e59
--- /dev/null
+++ b/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.controllers.utils_lib.commands import ip
+from antlion.test_utils.dhcp import base_test
+
+from mobly import asserts, test_runner
+
+
+class Dhcpv4DuplicateAddressTest(base_test.Dhcpv4InteropFixture):
+    def setup_test(self):
+        super().setup_test()
+        self.extra_addresses = []
+        self.ap_params = self.setup_ap()
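+        # Helper for adding and removing IPv4 aliases on the AP's interface over SSH.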
+        self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)
+
+    def teardown_test(self):
+        super().teardown_test()
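+        # Remove any aliases left behind by a failed test so they don't affect later runs.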
+        for ip in self.extra_addresses:
+            self.ap_ip_cmd.remove_ipv4_address(self.ap_params["id"], ip)
+
+    def test_duplicate_address_assignment(self):
+        """It's possible for a DHCP server to assign an address that already exists on the network.
+        DHCP clients are expected to perform a "gratuitous ARP" of the to-be-assigned address, and
+        refuse to assign that address. Clients should also recover by asking for a different
+        address.
+        """
+        # Modify subnet to hold fewer addresses.
+        # A '/29' has 8 addresses (6 usable excluding router / broadcast)
+        subnet = next(self.ap_params["network"].subnets(new_prefix=29))
+        subnet_conf = dhcp_config.Subnet(
+            subnet=subnet,
+            router=self.ap_params["ip"],
+            # When the DHCP server is considering dynamically allocating an IP address to a client,
+            # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
+            # for a second, and if no ICMP Echo response has been heard, it assigns the address.
+            # If a response is heard, the lease is abandoned, and the server does not respond to
+            # the client.
+            # The ping-check configuration parameter can be used to control checking - if its value
+            # is false, no ping check is done.
+            additional_parameters={"ping-check": "false"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+        # Add each of the usable IPs as an alias for the router's interface, such that the router
+        # will respond to any pings on it.
+        for ip in subnet.hosts():
+            self.ap_ip_cmd.add_ipv4_address(self.ap_params["id"], ip)
+            # Ensure we remove the address in self.teardown_test() even if the test fails
+            self.extra_addresses.append(ip)
+
+        self.connect(ap_params=self.ap_params)
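+        # Every usable address in the pool was aliased onto the router above, so the
+        # client should detect each offered address as a duplicate, decline it, and
+        # end up without a lease.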
+        with asserts.assert_raises(ConnectionError):
+            self.get_device_ipv4_addr()
+
+        # Per spec, the flow should be:
+        # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        for expected_message in [
+            r"DHCPDISCOVER from \S+",
+            r"DHCPOFFER on [0-9.]+ to \S+",
+            r"DHCPREQUEST for [0-9.]+",
+            r"DHCPACK on [0-9.]+",
+            r"DHCPDECLINE of [0-9.]+ from \S+ via .*: abandoned",
+            r"Abandoning IP address [0-9.]+: declined",
+        ]:
+            asserts.assert_true(
+                re.search(expected_message, dhcp_logs),
+                f"Did not find expected message ({expected_message}) in dhcp logs: {dhcp_logs}"
+                + "\n",
+            )
+
+        # Remove each of the IP aliases.
+        # Note: this also removes the router's address (e.g. 192.168.1.1), so pinging the
+        # router after this will not work.
+        while self.extra_addresses:
+            self.ap_ip_cmd.remove_ipv4_address(
+                self.ap_params["id"], self.extra_addresses.pop()
+            )
+
+        # Now, we should get an address successfully
+        ip = self.get_device_ipv4_addr()
+        dhcp_logs = self.access_point.get_dhcp_logs()
+
+        expected_string = f"DHCPREQUEST for {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPACK on {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py
new file mode 100644
index 0000000..b3d1ce9
--- /dev/null
+++ b/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+import re
+
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.test_utils.dhcp import base_test
+
+from mobly import asserts, test_runner
+
+
+class Dhcpv4InteropBasicTest(base_test.Dhcpv4InteropFixture):
+    """DhcpV4 tests which validate basic DHCP client/server interactions."""
+
+    def test_basic_dhcp_assignment(self):
+        self.run_test_case_expect_dhcp_success(
+            "basic_dhcp_assignment",
+            settings={"dhcp_options": {}, "dhcp_parameters": {}},
+        )
+
+    def test_pool_allows_unknown_clients(self):
+        self.run_test_case_expect_dhcp_success(
+            "pool_allows_unknown_clients",
+            settings={
+                "dhcp_options": {},
+                "dhcp_parameters": {"allow": "unknown-clients"},
+            },
+        )
+
+    def test_pool_disallows_unknown_clients(self):
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params["network"],
+            router=ap_params["ip"],
+            additional_parameters={"deny": "unknown-clients"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
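+        # With "deny unknown-clients", dhcpd only serves clients that match a host
+        # declaration, so this client should never be offered a lease.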
+        self.connect(ap_params=ap_params)
+        with asserts.assert_raises(ConnectionError):
+            self.get_device_ipv4_addr()
+
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        asserts.assert_true(
+            re.search(r"DHCPDISCOVER from .*no free leases", dhcp_logs),
+            "Did not find expected message in dhcp logs: " + dhcp_logs + "\n",
+        )
+
+    def test_lease_renewal(self):
+        """Validates that a client renews their DHCP lease."""
+        LEASE_TIME = 30
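+        # A short lease forces the client to renew (at roughly half the lease time)
+        # while the test sleeps below.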
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params["network"], router=ap_params["ip"]
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(
+            subnets=[subnet_conf],
+            default_lease_time=LEASE_TIME,
+            max_lease_time=LEASE_TIME,
+        )
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+        self.connect(ap_params=ap_params)
+        ip = self.get_device_ipv4_addr()
+
+        SLEEP_TIME = LEASE_TIME + 3
+        self.log.info(f"Sleeping {SLEEP_TIME}s to await DHCP renewal")
+        time.sleep(SLEEP_TIME)
+
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs.
+        # The log lines look like:
+        # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from f8:0f:f9:3d:ce:d1 via wlan1
+        # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+        expected_string = f"DHCPREQUEST for {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 2,
+            f'Not enough DHCP renewals ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
new file mode 100644
index 0000000..7e7b379
--- /dev/null
+++ b/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import random
+
+from collections import namedtuple
+from typing import Dict, Union
+
+from antlion.test_utils.dhcp import base_test
+
+from mobly import asserts, test_runner
+
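+# DHCP option codes requested via dhcp-parameter-request-list: option 15 is
+# Domain Name (RFC 2132) and option 119 is Domain Search (RFC 3397).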
+OPT_NUM_DOMAIN_SEARCH = 119
+OPT_NUM_DOMAIN_NAME = 15
+
+Test = namedtuple(typename="Test", field_names=["name", "settings"])
+
+
+class Dhcpv4InteropCombinatorialOptionsTest(base_test.Dhcpv4InteropFixture):
+    """DhcpV4 tests which validate combinations of DHCP options."""
+
+    def setup_generated_tests(self) -> None:
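+        # Mobly's generate_tests() creates one test per Test tuple below; the
+        # name function turns each tuple's name into a test_<name> method.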
+        self.generate_tests(
+            self.run_test_case_expect_dhcp_success,
+            lambda name, *_: f"test_{name}",
+            [
+                Test(
+                    "domain_name_valid",
+                    {
+                        "dhcp_options": {
+                            "domain-name": '"example.test"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
+                        },
+                        "dhcp_parameters": {},
+                    },
+                ),
+                Test(
+                    "domain_name_invalid",
+                    {
+                        "dhcp_options": {
+                            "domain-name": '"example.invalid"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
+                        },
+                        "dhcp_parameters": {},
+                    },
+                ),
+                Test(
+                    "domain_search_valid",
+                    {
+                        "dhcp_options": {
+                            "domain-name": '"example.test"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
+                        },
+                        "dhcp_parameters": {},
+                    },
+                ),
+                Test(
+                    "domain_search_invalid",
+                    {
+                        "dhcp_options": {
+                            "domain-name": '"example.invalid"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
+                        },
+                        "dhcp_parameters": {},
+                    },
+                ),
+                Test(
+                    "max_sized_message",
+                    {
+                        "dhcp_options": self._generate_max_sized_message_dhcp_options(),
+                        "dhcp_parameters": {},
+                    },
+                ),
+            ],
+        )
+
+    def _generate_max_sized_message_dhcp_options(self) -> Dict[str, Union[int, str]]:
+        """Generates the DHCP options for max sized message test.
+
+        The RFC limits DHCP payloads to 576 bytes unless the client signals it
+        can handle larger payloads, which it does by sending DHCP option 57,
+        "Maximum DHCP Message Size". Despite being able to accept larger
+        payloads, clients typically don't advertise this. The test verifies that
+        the client accepts a large message split across multiple ethernet
+        frames. The test is created by sending many bytes of options through the
+        domain-search option, which is of unbounded length (though its contents
+        are compressed per RFC1035 section 4.1.4).
+
+        Returns:
+            A dict of DHCP options.
+        """
+        typical_ethernet_mtu = 1500
+
+        long_dns_setting = ", ".join(
+            f'"ns{num}.example"'
+            for num in random.sample(range(100_000, 1_000_000), 250)
+        )
+        # RFC1035 compression means any shared suffix ('.example' in this case)
+        # will be deduplicated. Calculate approximate length by removing that
+        # suffix.
+        long_dns_setting_len = len(
+            long_dns_setting.replace(", ", "")
+            .replace('"', "")
+            .replace(".example", "")
+            .encode("utf-8")
+        )
+        asserts.assert_true(
+            long_dns_setting_len > typical_ethernet_mtu,
+            "Expected to generate message greater than ethernet mtu",
+        )
+
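+        # dhcp-max-message-size (option 57) is normally advertised by clients;
+        # setting it in the server config presumably lets dhcpd send replies
+        # larger than the 576-byte default, so the oversized domain-search
+        # payload goes out in a single DHCP message.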
+        return {
+            "dhcp-max-message-size": long_dns_setting_len * 2,
+            "domain-search": long_dns_setting,
+            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
+        }
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py
new file mode 100644
index 0000000..ebbf866
--- /dev/null
+++ b/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.test_utils.dhcp import base_test
+
+from mobly import asserts, test_runner
+
+
+class Dhcpv4InteropFixtureTest(base_test.Dhcpv4InteropFixture):
+    """Tests which validate the behavior of the Dhcpv4InteropFixture.
+
+    In theory, these are more similar to unit tests than ACTS tests, but
+    since they interact with hardware (specifically, the AP), we have to
+    write and run them like the rest of the ACTS tests."""
+
+    def test_invalid_options_not_accepted(self):
+        """Ensures the DHCP server doesn't accept invalid options"""
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params["network"],
+            router=ap_params["ip"],
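+            # 'foo' is not a valid dhcpd option, so config validation should
+            # fail when the server starts.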
+            additional_options={"foo": "bar"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        with asserts.assert_raises_regex(Exception, r"failed to start"):
+            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+    def test_invalid_parameters_not_accepted(self):
+        """Ensures the DHCP server doesn't accept invalid parameters"""
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params["network"],
+            router=ap_params["ip"],
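+            # 'foo' is not a valid dhcpd parameter; the server should refuse to
+            # start with this configuration.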
+            additional_parameters={"foo": "bar"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        with asserts.assert_raises_regex(Exception, r"failed to start"):
+            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+    def test_no_dhcp_server_started(self):
+        """Validates that the test fixture does not start a DHCP server."""
+        ap_params = self.setup_ap()
+        self.connect(ap_params=ap_params)
+        with asserts.assert_raises(ConnectionError):
+            self.get_device_ipv4_addr()
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropTest.py
deleted file mode 100644
index 89aef29..0000000
--- a/src/antlion/tests/dhcp/Dhcpv4InteropTest.py
+++ /dev/null
@@ -1,515 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-import time
-import re
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap, AccessPoint
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.utils_lib.commands import ip
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class Dhcpv4InteropFixture(WifiBaseTest):
-    """Test helpers for validating DHCPv4 Interop
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-    access_point: AccessPoint
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.access_point.stop_all_aps()
-
-    def connect(self, ap_params):
-        asserts.assert_true(
-            self.dut.associate(ap_params['ssid'],
-                               target_pwd=ap_params['password'],
-                               target_security=ap_params['target_security']),
-            'Failed to connect.')
-
-    def setup_ap(self):
-        """Generates a hostapd config and sets up the AP with that config.
-        Does not run a DHCP server.
-
-        Returns: A dictionary of information about the AP.
-        """
-        ssid = utils.rand_ascii_str(20)
-        security_mode = hostapd_constants.WPA2_STRING
-        security_profile = Security(
-            security_mode=security_mode,
-            password=generate_random_password(length=20),
-            wpa_cipher='CCMP',
-            wpa2_cipher='CCMP')
-        password = security_profile.password
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            security_mode)
-
-        ap_ids = setup_ap(access_point=self.access_point,
-                          profile_name='whirlwind',
-                          mode=hostapd_constants.MODE_11N_MIXED,
-                          channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                          n_capabilities=[],
-                          ac_capabilities=[],
-                          force_wmm=True,
-                          ssid=ssid,
-                          security=security_profile,
-                          password=password)
-
-        if len(ap_ids) > 1:
-            raise Exception("Expected only one SSID on AP")
-
-        configured_subnets = self.access_point.get_configured_subnets()
-        if len(configured_subnets) > 1:
-            raise Exception("Expected only one subnet on AP")
-        router_ip = configured_subnets[0].router
-        network = configured_subnets[0].network
-
-        self.access_point.stop_dhcp()
-
-        return {
-            'ssid': ssid,
-            'password': password,
-            'target_security': target_security,
-            'ip': router_ip,
-            'network': network,
-            'id': ap_ids[0],
-        }
-
-    def device_can_ping(self, dest_ip):
-        """Checks if the DUT can ping the given address.
-
-        Returns: True if can ping, False otherwise"""
-        self.log.info('Attempting to ping %s...' % dest_ip)
-        ping_result = self.dut.can_ping(dest_ip, count=2)
-        if ping_result:
-            self.log.info('Success pinging: %s' % dest_ip)
-        else:
-            self.log.info('Failure pinging: %s' % dest_ip)
-        return ping_result
-
-    def get_device_ipv4_addr(self, interface=None, timeout=20):
-        """Checks if device has an ipv4 private address. Sleeps 1 second between
-        retries.
-
-        Args:
-            interface: string, name of interface from which to get ipv4 address.
-
-        Raises:
-            ConnectionError, if DUT does not have an ipv4 address after all
-            timeout.
-
-        Returns:
-            The device's IP address
-
-        """
-        self.log.debug('Fetching updated WLAN interface list')
-        if interface is None:
-            interface = self.dut.device.wlan_client_test_interface_name
-        self.log.info(
-            'Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s '
-            'seconds.' % (interface, timeout))
-        timeout = time.time() + timeout
-        while time.time() < timeout:
-            ip_addrs = self.dut.device.get_interface_ip_addresses(interface)
-
-            if len(ip_addrs['ipv4_private']) > 0:
-                ip = ip_addrs['ipv4_private'][0]
-                self.log.info('DUT has an ipv4 address: %s' % ip)
-                return ip
-            else:
-                self.log.debug(
-                    'DUT does not yet have an ipv4 address...retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('DUT failed to get an ipv4 address.')
-
-    def run_test_case_expect_dhcp_success(self, settings):
-        """Starts the AP and DHCP server, and validates that the client
-        connects and obtains an address.
-
-        Args:
-            settings: a dictionary containing:
-                dhcp_parameters: a dictionary of DHCP parameters
-                dhcp_options: a dictionary of DHCP options
-        """
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params['network'],
-            router=ap_params['ip'],
-            additional_parameters=settings['dhcp_parameters'],
-            additional_options=settings['dhcp_options'])
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-
-        self.log.debug('DHCP Configuration:\n' +
-                       dhcp_conf.render_config_file() + "\n")
-
-        dhcp_logs_before = self.access_point.get_dhcp_logs().split('\n')
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-        self.connect(ap_params=ap_params)
-
-        # Typical log lines look like:
-        # dhcpd[26695]: DHCPDISCOVER from f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-
-        try:
-            ip = self.get_device_ipv4_addr()
-        except ConnectionError:
-            self.log.warn(dhcp_logs)
-            asserts.fail(f'DUT failed to get an IP address')
-
-        # Get updates to DHCP logs
-        dhcp_logs = self.access_point.get_dhcp_logs()
-        for line in dhcp_logs_before:
-            dhcp_logs = dhcp_logs.replace(line, '')
-
-        expected_string = f'DHCPDISCOVER from'
-        asserts.assert_equal(
-            dhcp_logs.count(expected_string), 1,
-            f'Incorrect count of DHCP Discovers ("{expected_string}") in logs:\n'
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPOFFER on {ip}'
-        asserts.assert_equal(
-            dhcp_logs.count(expected_string), 1,
-            f'Incorrect count of DHCP Offers ("{expected_string}") in logs:\n'
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPREQUEST for {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPACK on {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: ' +
-            dhcp_logs + "\n")
-
-        asserts.assert_true(self.device_can_ping(ap_params['ip']),
-                            f'DUT failed to ping router at {ap_params["ip"]}')
-
-
-class Dhcpv4InteropFixtureTest(Dhcpv4InteropFixture):
-    """Tests which validate the behavior of the Dhcpv4InteropFixture.
-
-    In theory, these are more similar to unit tests than ACTS tests, but
-    since they interact with hardware (specifically, the AP), we have to
-    write and run them like the rest of the ACTS tests."""
-
-    def test_invalid_options_not_accepted(self):
-        """Ensures the DHCP server doesn't accept invalid options"""
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(subnet=ap_params['network'],
-                                         router=ap_params['ip'],
-                                         additional_options={'foo': 'bar'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        with asserts.assert_raises_regex(Exception, r'failed to start'):
-            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-    def test_invalid_parameters_not_accepted(self):
-        """Ensures the DHCP server doesn't accept invalid parameters"""
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(subnet=ap_params['network'],
-                                         router=ap_params['ip'],
-                                         additional_parameters={'foo': 'bar'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        with asserts.assert_raises_regex(Exception, r'failed to start'):
-            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-    def test_no_dhcp_server_started(self):
-        """Validates that the test fixture does not start a DHCP server."""
-        ap_params = self.setup_ap()
-        self.connect(ap_params=ap_params)
-        with asserts.assert_raises(ConnectionError):
-            self.get_device_ipv4_addr()
-
-
-class Dhcpv4InteropBasicTest(Dhcpv4InteropFixture):
-    """DhcpV4 tests which validate basic DHCP client/server interactions."""
-
-    def test_basic_dhcp_assignment(self):
-        self.run_test_case_expect_dhcp_success(settings={
-            'dhcp_options': {},
-            'dhcp_parameters': {}
-        })
-
-    def test_pool_allows_unknown_clients(self):
-        self.run_test_case_expect_dhcp_success(settings={
-            'dhcp_options': {},
-            'dhcp_parameters': {
-                'allow': 'unknown-clients'
-            }
-        })
-
-    def test_pool_disallows_unknown_clients(self):
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params['network'],
-            router=ap_params['ip'],
-            additional_parameters={'deny': 'unknown-clients'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-        self.connect(ap_params=ap_params)
-        with asserts.assert_raises(ConnectionError):
-            self.get_device_ipv4_addr()
-
-        dhcp_logs = self.access_point.get_dhcp_logs()
-        asserts.assert_true(
-            re.search(r'DHCPDISCOVER from .*no free leases', dhcp_logs),
-            "Did not find expected message in dhcp logs: " + dhcp_logs + "\n")
-
-    def test_lease_renewal(self):
-        """Validates that a client renews their DHCP lease."""
-        LEASE_TIME = 30
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(subnet=ap_params['network'],
-                                         router=ap_params['ip'])
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf],
-                                           default_lease_time=LEASE_TIME,
-                                           max_lease_time=LEASE_TIME)
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-        self.connect(ap_params=ap_params)
-        ip = self.get_device_ipv4_addr()
-
-        dhcp_logs_before = self.access_point.get_dhcp_logs()
-        SLEEP_TIME = LEASE_TIME + 3
-        self.log.info(f'Sleeping {SLEEP_TIME}s to await DHCP renewal')
-        time.sleep(SLEEP_TIME)
-
-        dhcp_logs_after = self.access_point.get_dhcp_logs()
-        dhcp_logs = dhcp_logs_after.replace(dhcp_logs_before, '')
-        # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs.
-        # The log lines look like:
-        # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from f8:0f:f9:3d:ce:d1 via wlan1
-        # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-        expected_string = f'DHCPREQUEST for {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 2,
-            f'Not enough DHCP renewals ("{expected_string}") in logs: ' +
-            dhcp_logs + "\n")
-
-
-class Dhcpv4DuplicateAddressTest(Dhcpv4InteropFixture):
-
-    def setup_test(self):
-        super().setup_test()
-        self.extra_addresses = []
-        self.ap_params = self.setup_ap()
-        self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)
-
-    def teardown_test(self):
-        super().teardown_test()
-        for ip in self.extra_addresses:
-            self.ap_ip_cmd.remove_ipv4_address(self.ap_params['id'], ip)
-
-    def test_duplicate_address_assignment(self):
-        """It's possible for a DHCP server to assign an address that already exists on the network.
-        DHCP clients are expected to perform a "gratuitous ARP" of the to-be-assigned address, and
-        refuse to assign that address. Clients should also recover by asking for a different
-        address.
-        """
-        # Modify subnet to hold fewer addresses.
-        # A '/29' has 8 addresses (6 usable excluding router / broadcast)
-        subnet = next(self.ap_params['network'].subnets(new_prefix=29))
-        subnet_conf = dhcp_config.Subnet(
-            subnet=subnet,
-            router=self.ap_params['ip'],
-            # When the DHCP server is considering dynamically allocating an IP address to a client,
-            # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
-            # for a second, and if no ICMP Echo response has been heard, it assigns the address.
-            # If a response is heard, the lease is abandoned, and the server does not respond to
-            # the client.
-            # The ping-check configuration parameter can be used to control checking - if its value
-            # is false, no ping check is done.
-            additional_parameters={'ping-check': 'false'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-        # Add each of the usable IPs as an alias for the router's interface, such that the router
-        # will respond to any pings on it.
-        for ip in subnet.hosts():
-            self.ap_ip_cmd.add_ipv4_address(self.ap_params['id'], ip)
-            # Ensure we remove the address in self.teardown_test() even if the test fails
-            self.extra_addresses.append(ip)
-
-        self.connect(ap_params=self.ap_params)
-        with asserts.assert_raises(ConnectionError):
-            self.get_device_ipv4_addr()
-
-        # Per spec, the flow should be:
-        # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
-        dhcp_logs = self.access_point.get_dhcp_logs()
-        for expected_message in [
-                r'DHCPDISCOVER from \S+',
-                r'DHCPOFFER on [0-9.]+ to \S+',
-                r'DHCPREQUEST for [0-9.]+',
-                r'DHCPACK on [0-9.]+',
-                r'DHCPDECLINE of [0-9.]+ from \S+ via .*: abandoned',
-                r'Abandoning IP address [0-9.]+: declined',
-        ]:
-            asserts.assert_true(
-                re.search(expected_message, dhcp_logs),
-                f'Did not find expected message ({expected_message}) in dhcp logs: {dhcp_logs}'
-                + "\n")
-
-        # Remove each of the IP aliases.
-        # Note: this also removes the router's address (e.g. 192.168.1.1), so pinging the
-        # router after this will not work.
-        while self.extra_addresses:
-            self.ap_ip_cmd.remove_ipv4_address(self.ap_params['id'],
-                                               self.extra_addresses.pop())
-
-        # Now, we should get an address successfully
-        ip = self.get_device_ipv4_addr()
-        dhcp_logs = self.access_point.get_dhcp_logs()
-
-        expected_string = f'DHCPREQUEST for {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPACK on {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: ' +
-            dhcp_logs + "\n")
-
-
-class Dhcpv4InteropCombinatorialOptionsTest(Dhcpv4InteropFixture):
-    """DhcpV4 tests which validate combinations of DHCP options."""
-    OPT_NUM_DOMAIN_SEARCH = 119
-    OPT_NUM_DOMAIN_NAME = 15
-
-    def setup_generated_tests(self):
-        self._generate_dhcp_options()
-
-        test_args = []
-        for test in self.DHCP_OPTIONS:
-            for option_list in self.DHCP_OPTIONS[test]:
-                test_args.append(({
-                    'dhcp_options': option_list,
-                    'dhcp_parameters': {}
-                }, ))
-
-        self.generate_tests(test_logic=self.run_test_case_expect_dhcp_success,
-                            name_func=self.generate_test_name,
-                            arg_sets=test_args)
-
-    def generate_test_name(self, settings):
-        return settings["dhcp_options"]["test_name"]
-
-    def _generate_dhcp_options(self):
-        self.DHCP_OPTIONS = {
-            'domain-name-tests': [{
-                'domain-name': '"example.invalid"',
-                'dhcp-parameter-request-list': self.OPT_NUM_DOMAIN_NAME,
-                'test_name': "test_domain_name_invalid_tld"
-            }, {
-                'domain-name': '"example.test"',
-                'dhcp-parameter-request-list': self.OPT_NUM_DOMAIN_NAME,
-                'test_name': "test_domain_name_valid_tld"
-            }],
-            'domain-search-tests': [{
-                'domain-search':
-                '"example.invalid"',
-                'dhcp-parameter-request-list':
-                self.OPT_NUM_DOMAIN_SEARCH,
-                'test_name':
-                "test_domain_search_invalid_tld"
-            }, {
-                'domain-search': '"example.test"',
-                'dhcp-parameter-request-list': self.OPT_NUM_DOMAIN_SEARCH,
-                'test_name': "test_domain_search_valid_tld"
-            }]
-        }
-
-        # The RFC limits DHCP payloads to 576 bytes unless the client signals it can handle larger
-        # payloads, which it does by sending DHCP option 57, "Maximum DHCP Message Size". Despite
-        # being able to accept larger payloads, clients typically don't advertise this.
-        # The test verifies that the client accepts a large message split across multiple ethernet
-        # frames.
-        # The test is created by sending many bytes of options through the domain-name-servers
-        # option, which is of unbounded length (though is compressed per RFC1035 section 4.1.4).
-        typical_ethernet_mtu = 1500
-        self.DHCP_OPTIONS['max-message-size-tests'] = []
-
-        long_dns_setting = ', '.join(
-            f'"ns{num}.example"'
-            for num in random.sample(range(100_000, 1_000_000), 250))
-        # RFC1035 compression means any shared suffix ('.example' in this case) will
-        # be deduplicated. Calculate approximate length by removing that suffix.
-        long_dns_setting_len = len(
-            long_dns_setting.replace(', ', '').replace('"', '').replace(
-                '.example', '').encode('utf-8'))
-        asserts.assert_true(
-            long_dns_setting_len > typical_ethernet_mtu,
-            "Expected to generate message greater than ethernet mtu")
-        self.DHCP_OPTIONS['max-message-size-tests'].append({
-            'dhcp-max-message-size':
-            long_dns_setting_len * 2,
-            'domain-search':
-            long_dns_setting,
-            'dhcp-parameter-request-list':
-            self.OPT_NUM_DOMAIN_SEARCH,
-            'test_name':
-            "test_max_sized_message",
-        })
diff --git a/src/antlion/tests/examples/BUILD.gn b/src/antlion/tests/examples/BUILD.gn
new file mode 100644
index 0000000..066d515
--- /dev/null
+++ b/src/antlion/tests/examples/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("sl4f_sanity_test") {
+  main_source = "Sl4fSanityTest.py"
+  environments = display_envs + [ qemu_env ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":sl4f_sanity_test($host_toolchain)",
+  ]
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":sl4f_sanity_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/examples/Sl4fSanityTest.py b/src/antlion/tests/examples/Sl4fSanityTest.py
index b8429af..82c04f3 100644
--- a/src/antlion/tests/examples/Sl4fSanityTest.py
+++ b/src/antlion/tests/examples/Sl4fSanityTest.py
@@ -14,35 +14,35 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-Script for verifying SL4F is running on a Fuchsia device and
-can communicate to ACTS successfully.
-
+Test to verify SL4F is running on a Fuchsia device and can communicate with
+antlion successfully.
 """
+
+import logging
 from typing import List
 
-from antlion import asserts
-from antlion.base_test import BaseTestClass
+from antlion.controllers import fuchsia_device
 from antlion.controllers.fuchsia_device import FuchsiaDevice
 
+from mobly import asserts, test_runner, base_test
 
-class Sl4fSanityTest(BaseTestClass):
-    fuchsia_devices: List[FuchsiaDevice]
 
+class Sl4fSanityTest(base_test.BaseTestClass):
     def setup_class(self):
-        super().setup_class()
-
-        asserts.abort_class_if(
-            len(self.fuchsia_devices) == 0,
-            "Sorry, please try verifying FuchsiaDevice is in your config file and try again."
+        self.log = logging.getLogger()
+        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
         )
 
-        self.log.info(
-            "Congratulations! Fuchsia controllers have been initialized successfully!"
+        asserts.abort_class_if(
+            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
         )
 
     def test_example(self):
         for fuchsia_device in self.fuchsia_devices:
             res = fuchsia_device.sl4f.netstack_lib.netstackListInterfaces()
             self.log.info(res)
-        self.log.info("Congratulations! You've run your first test.")
-        return True
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/flash/FlashTest.py b/src/antlion/tests/flash/FlashTest.py
index 32676d4..7c5399f 100644
--- a/src/antlion/tests/flash/FlashTest.py
+++ b/src/antlion/tests/flash/FlashTest.py
@@ -18,19 +18,28 @@
 the Sponge test result properties. Uses the built in flashing tool for
 fuchsia_devices.
 """
-from antlion import asserts
-from antlion import signals
-from antlion.base_test import BaseTestClass
+
+import logging
+from typing import List
+
+from antlion.controllers import fuchsia_device, pdu
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.pdu import PduDevice
 from antlion.utils import get_device
 
+from mobly import asserts, base_test, signals, test_runner
+
 MAX_FLASH_ATTEMPTS = 3
 
 
-class FlashTest(BaseTestClass):
-
+class FlashTest(base_test.BaseTestClass):
     def setup_class(self):
-        super().setup_class()
-        self.failed_to_get_version = False
+        self.log = logging.getLogger()
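+        # With Mobly's BaseTestClass, controllers are registered explicitly in
+        # setup_class rather than being provided by the test runner.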
+        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
+        )
+        self.pdu_devices: List[PduDevice] = self.register_controller(pdu)
+        self.failed_to_get_device_info = False
 
     def teardown_class(self):
         # Verify that FlashTest successfully reported the DUT version. This is
@@ -40,50 +49,75 @@
         # TODO(http://b/253515812): This has been fixed in Mobly already. Remove
         # teardown_class and change "TestError" to "abort_all" in
         # test_flash_devices once we move to Mobly.
-        if self.failed_to_get_version:
-            asserts.abort_all('Failed to get DUT version')
+        if self.failed_to_get_device_info:
+            asserts.abort_all("Failed to get DUT device information")
 
         return super().teardown_class()
 
-    def test_flash_devices(self):
+    def test_flash_devices(self) -> None:
+        """Flashes a Fuchsia device for testing.
+
+        This method calls the fuchsia_device reboot() with the 'flash' reboot
+        type, which flashes (rather than paves) the Fuchsia device and then
+        soft reboots it. On error, it retries the flash up to
+        MAX_FLASH_ATTEMPTS times, hard rebooting the device between attempts.
+        """
         for device in self.fuchsia_devices:
             flash_counter = 0
             while True:
                 try:
-                    device.reboot(reboot_type='flash',
-                                  use_ssh=True,
-                                  unreachable_timeout=120,
-                                  ping_timeout=120)
-                    self.log.info(f'{device.orig_ip} has been flashed.')
+                    device.reboot(
+                        reboot_type="flash", use_ssh=True, unreachable_timeout=120
+                    )
+                    self.log.info(f"{device.orig_ip} has been flashed.")
                     break
                 except Exception as err:
                     self.log.error(
-                        f'Failed to flash {device.orig_ip} with error:\n{err}')
+                        f"Failed to flash {device.orig_ip} with error:\n{err}"
+                    )
 
                     if not device.device_pdu_config:
                         asserts.abort_all(
-                            f'Failed to flash {device.orig_ip} and no PDU available for hard reboot'
+                            f"Failed to flash {device.orig_ip} and no PDU"
+                            "available for hard reboot"
                         )
 
                     flash_counter = flash_counter + 1
                     if flash_counter == MAX_FLASH_ATTEMPTS:
                         asserts.abort_all(
-                            f'Failed to flash {device.orig_ip} after {MAX_FLASH_ATTEMPTS} attempts'
+                            f"Failed to flash {device.orig_ip} after"
+                            f"{MAX_FLASH_ATTEMPTS} attempts"
                         )
 
                     self.log.info(
-                        f'Hard rebooting {device.orig_ip} and retrying flash.')
-                    device.reboot(reboot_type='hard',
-                                  testbed_pdus=self.pdu_devices)
+                        f"Hard rebooting {device.orig_ip} and retrying flash."
+                    )
+                    device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
 
         # Report the new Fuchsia version
         try:
-            dut = get_device(self.fuchsia_devices, 'DUT')
+            dut = get_device(self.fuchsia_devices, "DUT")
             version = dut.version()
-            self.record_data({'sponge_properties': {
-                'DUT_VERSION': version,
-            }})
-            self.log.info("DUT version found: {}".format(version))
+            device_name = dut.device_name()
+            product_name = dut.product_name()
+
+            self.record_data(
+                {
+                    "sponge_properties": {
+                        "DUT_VERSION": version,
+                        "DUT_NAME": device_name,
+                        "DUT_PRODUCT": product_name,
+                    },
+                }
+            )
+
+            self.log.info(f"DUT version: {version}")
+            self.log.info(f"DUT name: {device_name}")
+            self.log.info(f"DUT product: {product_name}")
         except Exception as e:
-            self.failed_to_get_version = True
-            raise signals.TestError(f'Failed to get DUT version: {e}') from e
+            self.failed_to_get_device_info = True
+            raise signals.TestError(f"Failed to get DUT device information: {e}") from e
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/logging/FuchsiaLoggingTest.py b/src/antlion/tests/logging/FuchsiaLoggingTest.py
index 782a733..a5e2db8 100644
--- a/src/antlion/tests/logging/FuchsiaLoggingTest.py
+++ b/src/antlion/tests/logging/FuchsiaLoggingTest.py
@@ -14,35 +14,48 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion import asserts
+from mobly import asserts, base_test, signals, test_runner
+from typing import List
+
+from antlion.controllers import fuchsia_device
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+
+MESSAGE = "Logging Test"
 
 
-class FuchsiaLoggingTest(BaseTestClass):
-
+class FuchsiaLoggingTest(base_test.BaseTestClass):
     def setup_class(self):
-        super().setup_class()
+        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
+        )
+
+        asserts.abort_class_if(
+            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
+        )
+
         self.dut = self.fuchsia_devices[0]
-        self.message = "Logging Test"
 
     def test_log_err(self):
-        result = self.dut.sl4f.logging_lib.logE(self.message)
+        result = self.dut.sl4f.logging_lib.logE(MESSAGE)
         if result.get("error") is None:
             signals.TestPass(result.get("result"))
         else:
             signals.TestFailure(result.get("error"))
 
     def test_log_info(self):
-        result = self.dut.sl4f.logging_lib.logI(self.message)
+        result = self.dut.sl4f.logging_lib.logI(MESSAGE)
         if result.get("error") is None:
             signals.TestPass(result.get("result"))
         else:
             signals.TestFailure(result.get("error"))
 
     def test_log_warn(self):
-        result = self.dut.sl4f.logging_lib.logW(self.message)
+        result = self.dut.sl4f.logging_lib.logW(MESSAGE)
         if result.get("error") is None:
             signals.TestPass(result.get("result"))
         else:
             signals.TestFailure(result.get("error"))
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/netstack/NetstackIfaceTest.py b/src/antlion/tests/netstack/NetstackIfaceTest.py
index 5df07c6..fce3197 100644
--- a/src/antlion/tests/netstack/NetstackIfaceTest.py
+++ b/src/antlion/tests/netstack/NetstackIfaceTest.py
@@ -14,30 +14,37 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import signals
+import logging
+from typing import List
 
-from antlion.base_test import BaseTestClass
-from antlion import asserts
+from antlion.controllers import fuchsia_device
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+
+from mobly import asserts, signals, test_runner, base_test
 
 
-class NetstackIfaceTest(BaseTestClass):
+class NetstackIfaceTest(base_test.BaseTestClass):
     default_timeout = 10
     active_scan_callback_list = []
     active_adv_callback_list = []
     droid = None
 
     def setup_class(self):
-        super().setup_class()
-        if (len(self.fuchsia_devices) < 1):
-            self.log.error(
-                "NetstackFuchsiaTest Init: Not enough fuchsia devices.")
-        self.log.info("Running testbed setup with one fuchsia devices")
+        self.log = logging.getLogger()
+        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
+        )
+
+        asserts.abort_class_if(
+            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
+        )
+
         self.dut = self.fuchsia_devices[0]
 
     def _enable_all_interfaces(self):
         interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces()
         for item in interfaces.get("result"):
-            identifier = item.get('id')
+            identifier = item.get("id")
             self.dut.sl4f.netstack_lib.enableInterface(identifier)
 
     def setup_test(self):
@@ -67,12 +74,11 @@
         Priority: 1
         """
         interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-        if interfaces.get('error') is not None:
-            raise signals.TestFailure("Failed with {}".format(
-                interfaces.get('error')))
-        if len(interfaces.get('result')) < 1:
+        if interfaces.get("error") is not None:
+            raise signals.TestFailure("Failed with {}".format(interfaces.get("error")))
+        if len(interfaces.get("result")) < 1:
             raise signals.TestFailure("No interfaces found.")
-        self.log.info("Interfaces found: {}".format(interfaces.get('result')))
+        self.log.info("Interfaces found: {}".format(interfaces.get("result")))
         raise signals.TestPass("Success")
 
     def test_toggle_wlan_interface(self):
@@ -100,12 +106,12 @@
 
         def get_wlan_interfaces():
             result = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-            if (error := result.get('error')):
-                raise signals.TestFailure(
-                    f'unable to list interfaces: {error}')
+            if error := result.get("error"):
+                raise signals.TestFailure(f"unable to list interfaces: {error}")
             return [
-                interface for interface in result.get('result')
-                if 'wlan' in interface.get('name')
+                interface
+                for interface in result.get("result")
+                if "wlan" in interface.get("name")
             ]
 
         def get_ids(interfaces):
@@ -113,15 +119,16 @@
 
         wlan_interfaces = get_wlan_interfaces()
         if not wlan_interfaces:
-            raise signals.TestSkip('no wlan interface found')
+            raise signals.TestSkip("no wlan interface found")
         interface_ids = get_ids(wlan_interfaces)
 
         # Disable the interfaces.
         for identifier in interface_ids:
             result = self.dut.sl4f.netstack_lib.disableInterface(identifier)
-            if (error := result.get('error')):
+            if error := result.get("error"):
                 raise signals.TestFailure(
-                    f'failed to disable wlan interface {identifier}: {error}')
+                    f"failed to disable wlan interface {identifier}: {error}"
+                )
 
         # Retrieve the interfaces again.
         disabled_wlan_interfaces = get_wlan_interfaces()
@@ -129,23 +136,29 @@
 
         if not disabled_interface_ids == interface_ids:
             raise signals.TestFailure(
-                f'disabled interface IDs do not match original interface IDs: original={interface_ids} disabled={disabled_interface_ids}'
+                f"disabled interface IDs do not match original interface IDs: original={interface_ids} disabled={disabled_interface_ids}"
             )
 
         # Check the current state of the interfaces.
         for interface in disabled_interfaces:
-            if len(interface_info.get('ipv4_addresses')) > 0:
+            if len(interface_info.get("ipv4_addresses")) > 0:
                 raise signals.TestFailure(
-                    f'no Ipv4 Address should be present: {interface}')
+                    f"no Ipv4 Address should be present: {interface}"
+                )
 
             # TODO (35981): Verify other values when interface down.
 
         # Re-enable the interfaces.
         for identifier in disabled_interface_ids:
             result = self.dut.sl4f.netstack_lib.enableInterface(identifier)
-            if (error := result.get('error')):
+            if error := result.get("error"):
                 raise signals.TestFailure(
-                    f'failed to enable wlan interface {identifier}: {error}')
+                    f"failed to enable wlan interface {identifier}: {error}"
+                )
 
         # TODO (35981): Verify other values when interface up.
         raise signals.TestPass("Success")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/netstack/NetstackIxiaTest.py b/src/antlion/tests/netstack/NetstackIxiaTest.py
deleted file mode 100644
index bbe90a6..0000000
--- a/src/antlion/tests/netstack/NetstackIxiaTest.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_bss_settings
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-
-from antlion.test_utils.net.NetstackBaseTest import NetstackBaseTest
-
-from antlion.utils import rand_ascii_str
-
-
-class NetstackIxiaTest(NetstackBaseTest):
-    def __init__(self, controllers):
-        NetstackBaseTest.__init__(self, controllers)
-
-    def setup_class(self):
-        self.log.info('Setup {cls}'.format(cls=type(self)))
-
-        if not self.fuchsia_devices:
-            self.log.error(
-                "NetstackFuchsiaTest Init: Not enough fuchsia devices.")
-        self.log.info("Running testbed setup with one fuchsia devices")
-        self.fuchsia_dev = self.fuchsia_devices[0]
-
-        # We want to bring up several 2GHz and 5GHz BSSes.
-        wifi_bands = ['2g', '5g']
-
-        # Currently AP_DEFAULT_CHANNEL_2G is 6
-        # and AP_DEFAULT_CHANNEL_5G is 36.
-        wifi_channels = [
-            hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            hostapd_constants.AP_DEFAULT_CHANNEL_5G
-        ]
-
-        # Each band will start up an Open BSS (security_mode=None)
-        # and a WPA2 BSS (security_mode=hostapd_constants.WPA2_STRING)
-        security_modes = [None, hostapd_constants.WPA2_STRING]
-
-        # All secure BSSes will use the same password.
-        wifi_password = rand_ascii_str(10)
-        self.log.info('Wi-Fi password for this test: {wifi_password}'.format(
-            wifi_password=wifi_password))
-        hostapd_configs = []
-        wifi_interfaces = {}
-        bss_settings = {}
-
-        # Build a configuration for each sub-BSSID
-        for band_index, wifi_band in enumerate(wifi_bands):
-            ssid_name = 'Ixia_{wifi_band}_#{bss_number}_{security_mode}'
-            bss_settings[wifi_band] = []
-
-            # Prepare the extra SSIDs.
-            for mode_index, security_mode in enumerate(security_modes):
-
-                # Skip the first SSID because we configure that separately.
-                # due to the way the APIs work.  This loop is only concerned
-                # with the sub-BSSIDs.
-                if mode_index == 0:
-                    continue
-
-                bss_name = ssid_name.format(wifi_band=wifi_band,
-                                            security_mode=security_mode,
-                                            bss_number=mode_index + 1)
-
-                bss_setting = hostapd_bss_settings.BssSettings(
-                    name=bss_name,
-                    ssid=bss_name,
-                    security=hostapd_security.Security(
-                        security_mode=security_mode, password=wifi_password))
-                bss_settings[wifi_band].append(bss_setting)
-
-            # This is the configuration for the first SSID.
-            ssid_name = ssid_name.format(wifi_band=wifi_band,
-                                         security_mode=security_modes[0],
-                                         bss_number=1)
-
-            hostapd_configs.append(
-                hostapd_ap_preset.create_ap_preset(
-                    profile_name='whirlwind',
-                    iface_wlan_2g='wlan0',
-                    iface_wlan_5g='wlan1',
-                    ssid=ssid_name,
-                    channel=wifi_channels[band_index],
-                    security=hostapd_security.Security(
-                        security_mode=security_modes[0],
-                        password=wifi_password),
-                    bss_settings=bss_settings[wifi_band]))
-
-            access_point = self.access_points[band_index]
-
-            # Now bring up the AP and track the interfaces we're using for
-            # each BSSID.  All BSSIDs are now beaconing.
-            wifi_interfaces[wifi_band] = access_point.start_ap(
-                hostapd_configs[band_index])
-
-            # Disable DHCP on this Wi-Fi band.
-            # Note: This also disables DHCP on each sub-BSSID due to how
-            # the APIs are built.
-            #
-            # We need to do this in order to enable IxANVL testing across
-            # Wi-Fi, which needs to configure the IP addresses per-interface
-            # on the client device.
-            access_point.stop_dhcp()
-
-            # Disable NAT.
-            # NAT config in access_point.py is global at the moment, but
-            # calling it twice (once per band) won't hurt anything.  This is
-            # easier than trying to conditionalize per band.
-            #
-            # Note that we could make this per-band, but it would require
-            # refactoring the access_point.py code that turns on NAT, however
-            # if that ever does happen then this code will work as expected
-            # without modification.
-            #
-            # This is also required for IxANVL testing.  NAT would interfere
-            # with IxANVL because IxANVL needs to see the raw frames
-            # sourcing/sinking from/to the DUT for protocols such as ARP and
-            # DHCP, but it also needs the MAC/IP of the source and destination
-            # frames and packets to be from the DUT, so we want the AP to act
-            # like a bridge for these tests.
-            access_point.stop_nat()
-
-        # eth1 is the LAN port, which will always be a part of the bridge.
-        bridge_interfaces = ['eth1']
-
-        # This adds each bssid interface to the bridge.
-        for wifi_band in wifi_bands:
-            for wifi_interface in wifi_interfaces[wifi_band]:
-                bridge_interfaces.append(wifi_interface)
-
-        # Each interface can only be a member of 1 bridge, so we're going to use
-        # the last access_point object to set the bridge up for all interfaces.
-        access_point.create_bridge(bridge_name='ixia_bridge0',
-                                   interfaces=bridge_interfaces)
-
-    def setup_test(self):
-        pass
-
-    def teardown_test(self):
-        pass
-
-    def teardown_class(self):
-        self.log.info('Teardown {cls}'.format(cls=type(self)))
-
-        import pdb
-        pdb.set_trace()
-
-        for access_point in self.access_points:
-            access_point.remove_bridge(bridge_name='ixia_bridge0')
-
-    """Tests"""
-
-    def test_do_nothing(self):
-        return True
diff --git a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py b/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
index 5ce5fda..36b52ad 100644
--- a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
+++ b/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
@@ -14,27 +14,29 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import signals
+import logging
 import time
-from antlion.base_test import BaseTestClass
+from typing import List
+
+from antlion.controllers import fuchsia_device
+from antlion.controllers.fuchsia_device import FuchsiaDevice
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import asserts, base_test, signals, test_runner
 
-class ToggleWlanInterfaceStressTest(BaseTestClass):
 
+class ToggleWlanInterfaceStressTest(base_test.BaseTestClass):
     def setup_class(self):
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif dut == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        self.log = logging.getLogger()
+        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
+        )
+
+        asserts.abort_class_if(
+            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
+        )
+
+        self.dut = create_wlan_device(self.fuchsia_devices[0])
 
     def test_iface_toggle_and_ping(self):
         """Test that we don't error out when toggling WLAN interfaces.
@@ -63,32 +65,31 @@
             wlan_interfaces = self.dut.get_wlan_interface_id_list()
             print(wlan_interfaces)
             if len(wlan_interfaces) < 1:
-                raise signals.TestFailure(
-                    "Not enough wlan interfaces for test")
+                raise signals.TestFailure("Not enough wlan interfaces for test")
             if not self.dut.destroy_wlan_interface(wlan_interfaces[0]):
                 raise signals.TestFailure("Failed to destroy WLAN interface")
             # Really make sure it is dead
-            self.fuchsia_devices[0].ssh.run(
-                f"wlan iface del {wlan_interfaces[0]}")
+            self.fuchsia_devices[0].ssh.run(f"wlan iface del {wlan_interfaces[0]}")
             # Grace period
             time.sleep(2)
-            self.fuchsia_devices[0].ssh.run(
-                'wlan iface new --phy 0 --role Client')
+            self.fuchsia_devices[0].ssh.run("wlan iface new --phy 0 --role Client")
             end_time = time.time() + 300
             while time.time() < end_time:
                 time.sleep(1)
                 if self.dut.is_connected():
                     try:
-                        ping_result = self.dut.ping("8.8.8.8", 10, 1000, 1000,
-                                                    25)
+                        ping_result = self.dut.ping("8.8.8.8", 10, 1000, 1000, 25)
                         print(ping_result)
                     except Exception as err:
                         # TODO: Once we gain more stability, fail test when pinging fails
                         print("some err {}".format(err))
-                    time.sleep(2)  #give time for some traffic
+                    time.sleep(2)  # give time for some traffic
                     break
             if not self.dut.is_connected():
-                raise signals.TestFailure("Failed at iteration {}".format(i +
-                                                                          1))
+                raise signals.TestFailure("Failed at iteration {}".format(i + 1))
             self.log.info("Iteration {} successful".format(i + 1))
         raise signals.TestPass("Success")
+
+
+if __name__ == "__main__":
+    test_runner.main()
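
The Mobly migration above replaces antlion's implicit controller registration: each test now registers its controllers in setup_class and exposes a __main__ entry point so it can be run directly, with the testbed config handed to Mobly's test_runner via -c/--config. A minimal sketch of the pattern, with a hypothetical class and test method name:

import logging
from typing import List

from antlion.controllers import fuchsia_device
from antlion.controllers.fuchsia_device import FuchsiaDevice
from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device

from mobly import asserts, base_test, test_runner


class ExampleWlanTest(base_test.BaseTestClass):
    def setup_class(self):
        self.log = logging.getLogger()
        # Explicit controller registration: Mobly creates one FuchsiaDevice
        # per entry in the testbed config and returns them as a list.
        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
            fuchsia_device
        )
        asserts.abort_class_if(
            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
        )
        self.dut = create_wlan_device(self.fuchsia_devices[0])

    def test_query_wlan_interfaces(self):
        # Pass/fail conditions go through mobly.asserts rather than raising
        # antlion signals directly.
        interfaces = self.dut.get_wlan_interface_id_list()
        asserts.assert_true(
            len(interfaces) > 0, "Expected at least one WLAN interface"
        )


if __name__ == "__main__":
    test_runner.main()
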
diff --git a/src/antlion/tests/wlan/BUILD.gn b/src/antlion/tests/wlan/BUILD.gn
new file mode 100644
index 0000000..717fed9
--- /dev/null
+++ b/src/antlion/tests/wlan/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_host, "antlion tests only supported for host testing")
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    "compliance:e2e_tests",
+    "facade:e2e_tests",
+    "functional:e2e_tests",
+    "misc:e2e_tests",
+    "performance:e2e_tests",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    "functional:e2e_tests_quick",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    "functional:e2e_tests_manual",
+    "performance:e2e_tests_manual",
+  ]
+}
diff --git a/src/antlion/tests/wlan/compliance/BUILD.gn b/src/antlion/tests/wlan/compliance/BUILD.gn
new file mode 100644
index 0000000..bdfc396
--- /dev/null
+++ b/src/antlion/tests/wlan/compliance/BUILD.gn
@@ -0,0 +1,44 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("vape_interop_test") {
+  main_source = "VapeInteropTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_phy_compliance_11ac_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_phy_compliance_11n_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_phy_compliance_abg_test") {
+  main_source = "WlanPhyComplianceABGTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_security_compliance_abg_test") {
+  main_source = "WlanSecurityComplianceABGTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":vape_interop_test($host_toolchain)",
+    ":wlan_phy_compliance_11ac_test($host_toolchain)",
+    ":wlan_phy_compliance_11n_test($host_toolchain)",
+    ":wlan_phy_compliance_abg_test($host_toolchain)",
+    ":wlan_security_compliance_abg_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/wlan/compliance/VapeInteropTest.py b/src/antlion/tests/wlan/compliance/VapeInteropTest.py
index 86bc3e3..32b39c6 100644
--- a/src/antlion/tests/wlan/compliance/VapeInteropTest.py
+++ b/src/antlion/tests/wlan/compliance/VapeInteropTest.py
@@ -14,16 +14,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import asserts
 from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
+
+from mobly import asserts, test_runner
 
 
-class VapeInteropTest(WifiBaseTest):
+class VapeInteropTest(base_test.WifiBaseTest):
     """Tests interoperability with mock third party AP profiles.
 
     Test Bed Requirement:
@@ -33,28 +34,28 @@
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                ' Expected "fuchsia_devices" or "android_devices".'
+            )
 
         self.access_point = self.access_points[0]
 
         # Same for both 2g and 5g
         self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        self.password = utils.rand_ascii_str(
-            hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
+        self.password = utils.rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
         self.security_profile_wpa2 = Security(
             security_mode=hostapd_constants.WPA2_STRING,
             password=self.password,
-            wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER)
+            wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
+        )
 
         self.access_point.stop_all_aps()
 
@@ -81,652 +82,842 @@
         self.access_point.stop_all_aps()
 
     def test_associate_actiontec_pk5000_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_pk5000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_pk5000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_actiontec_pk5000_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_pk5000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_pk5000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_actiontec_mi424wr_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_mi424wr',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_mi424wr",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_actiontec_mi424wr_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_mi424wr',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_mi424wr",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtac66u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtac66u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtac66u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtac66u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtac86u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtac86u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtac86u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtac86u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtac5300_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtac5300_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtac5300_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtac5300_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtn56u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtn56u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtn56u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtn56u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtn66u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtn66u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_asus_rtn66u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_asus_rtn66u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_belkin_f9k1001v5_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='belkin_f9k1001v5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="belkin_f9k1001v5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_belkin_f9k1001v5_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='belkin_f9k1001v5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="belkin_f9k1001v5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_linksys_ea4500_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_linksys_ea4500_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_linksys_ea4500_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_linksys_ea4500_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_linksys_ea9500_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_linksys_ea9500_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_linksys_ea9500_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_linksys_ea9500_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_linksys_wrt1900acv2_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_linksys_wrt1900acv2_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_linksys_wrt1900acv2_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_linksys_wrt1900acv2_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_netgear_r7000_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_netgear_r7000_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_netgear_r7000_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_netgear_r7000_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_netgear_wndr3400_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_netgear_wndr3400_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_netgear_wndr3400_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_netgear_wndr3400_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_securifi_almond_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='securifi_almond',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="securifi_almond",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_securifi_almond_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='securifi_almond',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="securifi_almond",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_archerc5_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_archerc5_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_archerc5_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_archerc5_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_archerc7_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_archerc7_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_archerc7_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_archerc7_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_c1200_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_c1200_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_c1200_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_c1200_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
 
     def test_associate_tplink_tlwr940n_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_tlwr940n',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_tlwr940n",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
 
     def test_associate_tplink_tlwr940n_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_tlwr940n',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_tlwr940n",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+            password=self.password,
+        )
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=hostapd_constants.WPA2_STRING,
+            ),
+            "Failed to connect.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
index 5efe1f4..4b797a5 100644
--- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
+++ b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
@@ -16,15 +16,15 @@
 
 import itertools
 
-from antlion import asserts
-from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.utils import rand_ascii_str
 
+from mobly import asserts, test_runner
+
 # AC Capabilities
 """
 Capabilities Not Supported on Whirlwind:
@@ -42,12 +42,13 @@
 """
 VHT_MAX_MPDU_LEN = [
     hostapd_constants.AC_CAPABILITY_MAX_MPDU_7991,
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, ''
+    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+    "",
 ]
-RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, '']
-SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, '']
-TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, '']
-RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, '']
+RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, ""]
+SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, ""]
+TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, ""]
+RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, ""]
 MAX_A_MPDU = [
     hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0,
     hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1,
@@ -56,33 +57,39 @@
     hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4,
     hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5,
     hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, ''
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+    "",
 ]
-RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, '']
-TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, '']
+RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, ""]
+TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, ""]
 
 # Default 11N Capabilities
 N_CAPABS_40MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
     hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_SGI20, hostapd_constants.N_CAPABILITY_SGI40,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_SGI40,
     hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-    hostapd_constants.N_CAPABILITY_HT40_PLUS
+    hostapd_constants.N_CAPABILITY_HT40_PLUS,
 ]
 
 N_CAPABS_20MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
     hostapd_constants.N_CAPABILITY_RX_STBC1,
     hostapd_constants.N_CAPABILITY_SGI20,
     hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-    hostapd_constants.N_CAPABILITY_HT20
+    hostapd_constants.N_CAPABILITY_HT20,
 ]
 
 # Default wpa2 profile.
-WPA2_SECURITY = Security(security_mode=hostapd_constants.WPA2_STRING,
-                         password=rand_ascii_str(20),
-                         wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-                         wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER)
+WPA2_SECURITY = Security(
+    security_mode=hostapd_constants.WPA2_STRING,
+    password=rand_ascii_str(20),
+    wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
+    wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
+)
 
 SECURITIES = [None, WPA2_SECURITY]
 
@@ -97,17 +104,17 @@
     Returns:
         A string test case name
     """
-    chbw = settings['chbw']
-    sec = 'wpa2' if settings['security'] else 'open'
+    chbw = settings["chbw"]
+    sec = "wpa2" if settings["security"] else "open"
     ret = []
     for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
-        if cap in settings['ac_capabilities']:
+        if cap in settings["ac_capabilities"]:
             ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
-    return 'test_11ac_%smhz_%s_%s' % (chbw, sec, ''.join(ret))
+    return "test_11ac_%smhz_%s_%s" % (chbw, sec, "".join(ret))
 
 
 # 6912 test cases
-class WlanPhyCompliance11ACTest(WifiBaseTest):
+class WlanPhyCompliance11ACTest(base_test.WifiBaseTest):
     """Tests for validating 11ac PHYS.
 
     Test Bed Requirement:
@@ -119,28 +126,33 @@
         super().__init__(controllers)
 
     def setup_generated_tests(self):
-        test_args = self._generate_20mhz_test_args() + \
-            self._generate_40mhz_test_args() + \
-            self._generate_80mhz_test_args()
-        self.generate_tests(test_logic=self.setup_and_connect,
-                            name_func=generate_test_name,
-                            arg_sets=test_args)
+        test_args = (
+            self._generate_20mhz_test_args()
+            + self._generate_40mhz_test_args()
+            + self._generate_80mhz_test_args()
+        )
+        self.generate_tests(
+            test_logic=self.setup_and_connect,
+            name_func=generate_test_name,
+            arg_sets=test_args,
+        )
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
         self.access_point = self.access_points[0]
-        self.android_devices = getattr(self, 'android_devices', [])
+        self.android_devices = getattr(self, "android_devices", [])
         self.access_point.stop_all_aps()
 
     def setup_test(self):
@@ -172,32 +184,35 @@
 
         """
         ssid = rand_ascii_str(20)
-        security = ap_settings['security']
-        chbw = ap_settings['chbw']
+        security = ap_settings["security"]
+        chbw = ap_settings["chbw"]
         password = None
         target_security = None
         if security:
             password = security.password
             target_security = security.security_mode_string
-        n_capabilities = ap_settings['n_capabilities']
-        ac_capabilities = ap_settings['ac_capabilities']
+        n_capabilities = ap_settings["n_capabilities"]
+        ac_capabilities = ap_settings["ac_capabilities"]
 
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 mode=hostapd_constants.MODE_11AC_MIXED,
-                 channel=36,
-                 n_capabilities=n_capabilities,
-                 ac_capabilities=ac_capabilities,
-                 force_wmm=True,
-                 ssid=ssid,
-                 security=security,
-                 vht_bandwidth=chbw,
-                 password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=hostapd_constants.MODE_11AC_MIXED,
+            channel=36,
+            n_capabilities=n_capabilities,
+            ac_capabilities=ac_capabilities,
+            force_wmm=True,
+            ssid=ssid,
+            security=security,
+            vht_bandwidth=chbw,
+            password=password,
+        )
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_pwd=password,
-                               target_security=target_security),
-            'Failed to associate.')
+            self.dut.associate(
+                ssid, target_pwd=password, target_security=target_security
+            ),
+            "Failed to associate.",
+        )
 
     # 1728 tests
     def _generate_20mhz_test_args(self):
@@ -205,18 +220,28 @@
 
         # 864 test cases for open security
         # 864 test cases for wpa2 security
-        for combination in itertools.product(SECURITIES, VHT_MAX_MPDU_LEN,
-                                             RXLDPC, RX_STBC, TX_STBC,
-                                             MAX_A_MPDU, RX_ANTENNA,
-                                             TX_ANTENNA):
+        for combination in itertools.product(
+            SECURITIES,
+            VHT_MAX_MPDU_LEN,
+            RXLDPC,
+            RX_STBC,
+            TX_STBC,
+            MAX_A_MPDU,
+            RX_ANTENNA,
+            TX_ANTENNA,
+        ):
             security = combination[0]
             ac_capabilities = combination[1:]
-            test_args.append(({
-                'chbw': 20,
-                'security': security,
-                'n_capabilities': N_CAPABS_20MHZ,
-                'ac_capabilities': ac_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "chbw": 20,
+                        "security": security,
+                        "n_capabilities": N_CAPABS_20MHZ,
+                        "ac_capabilities": ac_capabilities,
+                    },
+                )
+            )
 
         return test_args
 
@@ -226,18 +251,28 @@
 
         # 864 test cases for open security
         # 864 test cases for wpa2 security
-        for combination in itertools.product(SECURITIES, VHT_MAX_MPDU_LEN,
-                                             RXLDPC, RX_STBC, TX_STBC,
-                                             MAX_A_MPDU, RX_ANTENNA,
-                                             TX_ANTENNA):
+        for combination in itertools.product(
+            SECURITIES,
+            VHT_MAX_MPDU_LEN,
+            RXLDPC,
+            RX_STBC,
+            TX_STBC,
+            MAX_A_MPDU,
+            RX_ANTENNA,
+            TX_ANTENNA,
+        ):
             security = combination[0]
             ac_capabilities = combination[1:]
-            test_args.append(({
-                'chbw': 40,
-                'security': security,
-                'n_capabilities': N_CAPABS_40MHZ,
-                'ac_capabilities': ac_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "chbw": 40,
+                        "security": security,
+                        "n_capabilities": N_CAPABS_40MHZ,
+                        "ac_capabilities": ac_capabilities,
+                    },
+                )
+            )
 
         return test_args
 
@@ -247,16 +282,31 @@
 
         # 1728 test cases for open security
         # 1728 test cases for wpa2 security
-        for combination in itertools.product(SECURITIES, VHT_MAX_MPDU_LEN,
-                                             RXLDPC, SHORT_GI_80, RX_STBC,
-                                             TX_STBC, MAX_A_MPDU, RX_ANTENNA,
-                                             TX_ANTENNA):
+        for combination in itertools.product(
+            SECURITIES,
+            VHT_MAX_MPDU_LEN,
+            RXLDPC,
+            SHORT_GI_80,
+            RX_STBC,
+            TX_STBC,
+            MAX_A_MPDU,
+            RX_ANTENNA,
+            TX_ANTENNA,
+        ):
             security = combination[0]
             ac_capabilities = combination[1:]
-            test_args.append(({
-                'chbw': 80,
-                'security': security,
-                'n_capabilities': N_CAPABS_40MHZ,
-                'ac_capabilities': ac_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "chbw": 80,
+                        "security": security,
+                        "n_capabilities": N_CAPABS_40MHZ,
+                        "ac_capabilities": ac_capabilities,
+                    },
+                )
+            )
         return test_args
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
index d30982c..244953e 100644
--- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
+++ b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
@@ -16,7 +16,6 @@
 
 import itertools
 
-from antlion import asserts
 from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
@@ -24,25 +23,27 @@
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 
-FREQUENCY_24 = ['2.4GHz']
-FREQUENCY_5 = ['5GHz']
-CHANNEL_BANDWIDTH_20 = ['HT20']
-CHANNEL_BANDWIDTH_40_LOWER = ['HT40-']
-CHANNEL_BANDWIDTH_40_UPPER = ['HT40+']
-SECURITY_OPEN = 'open'
-SECURITY_WPA2 = 'wpa2'
+from mobly import asserts, test_runner
+
+FREQUENCY_24 = ["2.4GHz"]
+FREQUENCY_5 = ["5GHz"]
+CHANNEL_BANDWIDTH_20 = ["HT20"]
+CHANNEL_BANDWIDTH_40_LOWER = ["HT40-"]
+CHANNEL_BANDWIDTH_40_UPPER = ["HT40+"]
+SECURITY_OPEN = "open"
+SECURITY_WPA2 = "wpa2"
 N_MODE = [hostapd_constants.MODE_11N_PURE, hostapd_constants.MODE_11N_MIXED]
-LDPC = [hostapd_constants.N_CAPABILITY_LDPC, '']
-TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, '']
-RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, '']
-SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, '']
-SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, '']
-DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, '']
-INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, '']
-MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, '']
-SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, '']
+LDPC = [hostapd_constants.N_CAPABILITY_LDPC, ""]
+TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, ""]
+RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, ""]
+SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, ""]
+SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, ""]
+DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, ""]
+INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, ""]
+MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, ""]
+SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, ""]
 
 
 def generate_test_name(settings):
@@ -56,21 +57,25 @@
     """
     ret = []
     for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
-        if cap in settings['n_capabilities']:
+        if cap in settings["n_capabilities"]:
             ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
     # '+' is used by Mobile Harness as special character, don't use it in test names
-    if settings['chbw'] == 'HT40-':
+    if settings["chbw"] == "HT40-":
         chbw = "HT40Lower"
-    elif settings['chbw'] == 'HT40+':
+    elif settings["chbw"] == "HT40+":
         chbw = "HT40Upper"
     else:
-        chbw = settings['chbw']
-    return 'test_11n_%s_%s_%s_%s_%s' % (settings['frequency'], chbw,
-                                        settings['security'],
-                                        settings['n_mode'], ''.join(ret))
+        chbw = settings["chbw"]
+    return "test_11n_%s_%s_%s_%s_%s" % (
+        settings["frequency"],
+        chbw,
+        settings["security"],
+        settings["n_mode"],
+        "".join(ret),
+    )
 
 
-class WlanPhyCompliance11NTest(WifiBaseTest):
+class WlanPhyCompliance11NTest(base_test.WifiBaseTest):
     """Tests for validating 11n PHYS.
 
     Test Bed Requirement:
@@ -82,36 +87,40 @@
         super().__init__(controllers)
 
     def setup_generated_tests(self):
-        test_args = self._generate_24_HT20_test_args() + \
-            self._generate_24_HT40_lower_test_args() + \
-            self._generate_24_HT40_upper_test_args() + \
-            self._generate_5_HT20_test_args() + \
-            self._generate_5_HT40_lower_test_args() + \
-            self._generate_5_HT40_upper_test_args() + \
-            self._generate_24_HT20_wpa2_test_args() + \
-            self._generate_24_HT40_lower_wpa2_test_args() + \
-            self._generate_24_HT40_upper_wpa2_test_args() + \
-            self._generate_5_HT20_wpa2_test_args() + \
-            self._generate_5_HT40_lower_wpa2_test_args() + \
-            self._generate_5_HT40_upper_wpa2_test_args()
+        test_args = (
+            self._generate_24_HT20_test_args()
+            + self._generate_24_HT40_lower_test_args()
+            + self._generate_24_HT40_upper_test_args()
+            + self._generate_5_HT20_test_args()
+            + self._generate_5_HT40_lower_test_args()
+            + self._generate_5_HT40_upper_test_args()
+            + self._generate_24_HT20_wpa2_test_args()
+            + self._generate_24_HT40_lower_wpa2_test_args()
+            + self._generate_24_HT40_upper_wpa2_test_args()
+            + self._generate_5_HT20_wpa2_test_args()
+            + self._generate_5_HT40_lower_wpa2_test_args()
+            + self._generate_5_HT40_upper_wpa2_test_args()
+        )
 
-        self.generate_tests(test_logic=self.setup_and_connect,
-                            name_func=generate_test_name,
-                            arg_sets=test_args)
+        self.generate_tests(
+            test_logic=self.setup_and_connect,
+            name_func=generate_test_name,
+            arg_sets=test_args,
+        )
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
         self.access_point = self.access_points[0]
         self.access_point.stop_all_aps()
@@ -148,290 +157,437 @@
         ssid = utils.rand_ascii_str(20)
         security_profile = None
         password = None
-        temp_n_capabilities = list(ap_settings['n_capabilities'])
+        temp_n_capabilities = list(ap_settings["n_capabilities"])
         n_capabilities = []
         for n_capability in temp_n_capabilities:
             if n_capability in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
                 n_capabilities.append(n_capability)
 
-        if ap_settings['chbw'] == 'HT20' or ap_settings['chbw'] == 'HT40+':
-            if ap_settings['frequency'] == '2.4GHz':
+        if ap_settings["chbw"] == "HT20" or ap_settings["chbw"] == "HT40+":
+            if ap_settings["frequency"] == "2.4GHz":
                 channel = 1
-            elif ap_settings['frequency'] == '5GHz':
+            elif ap_settings["frequency"] == "5GHz":
                 channel = 36
             else:
-                raise ValueError('Invalid frequence: %s' %
-                                 ap_settings['frequency'])
+                raise ValueError("Invalid frequence: %s" % ap_settings["frequency"])
 
-        elif ap_settings['chbw'] == 'HT40-':
-            if ap_settings['frequency'] == '2.4GHz':
+        elif ap_settings["chbw"] == "HT40-":
+            if ap_settings["frequency"] == "2.4GHz":
                 channel = 11
-            elif ap_settings['frequency'] == '5GHz':
+            elif ap_settings["frequency"] == "5GHz":
                 channel = 60
             else:
-                raise ValueError('Invalid frequency: %s' %
-                                 ap_settings['frequency'])
+                raise ValueError("Invalid frequency: %s" % ap_settings["frequency"])
 
         else:
-            raise ValueError('Invalid channel bandwidth: %s' %
-                             ap_settings['chbw'])
+            raise ValueError("Invalid channel bandwidth: %s" % ap_settings["chbw"])
 
-        if ap_settings['chbw'] == 'HT40-' or ap_settings['chbw'] == 'HT40+':
+        if ap_settings["chbw"] == "HT40-" or ap_settings["chbw"] == "HT40+":
             if hostapd_config.ht40_plus_allowed(channel):
                 extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
             elif hostapd_config.ht40_minus_allowed(channel):
                 extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
             else:
-                raise ValueError('Invalid channel: %s' % channel)
+                raise ValueError("Invalid channel: %s" % channel)
             n_capabilities.append(extended_channel)
 
-        if ap_settings['security'] == 'wpa2':
+        if ap_settings["security"] == "wpa2":
             security_profile = Security(
                 security_mode=SECURITY_WPA2,
                 password=generate_random_password(length=20),
-                wpa_cipher='CCMP',
-                wpa2_cipher='CCMP')
+                wpa_cipher="CCMP",
+                wpa2_cipher="CCMP",
+            )
             password = security_profile.password
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            ap_settings['security'], None)
+        target_security = (
+            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                ap_settings["security"], None
+            )
+        )
 
-        mode = ap_settings['n_mode']
+        mode = ap_settings["n_mode"]
         if mode not in N_MODE:
-            raise ValueError('Invalid n-mode: %s' % ap_settings['n-mode'])
+            raise ValueError("Invalid n-mode: %s" % ap_settings["n-mode"])
 
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 mode=mode,
-                 channel=channel,
-                 n_capabilities=n_capabilities,
-                 ac_capabilities=[],
-                 force_wmm=True,
-                 ssid=ssid,
-                 security=security_profile,
-                 password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=mode,
+            channel=channel,
+            n_capabilities=n_capabilities,
+            ac_capabilities=[],
+            force_wmm=True,
+            ssid=ssid,
+            security=security_profile,
+            password=password,
+        )
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_pwd=password,
-                               target_security=target_security),
-            'Failed to connect.')
+            self.dut.associate(
+                ssid, target_pwd=password, target_security=target_security
+            ),
+            "Failed to connect.",
+        )
 
     def _generate_24_HT20_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_20, N_MODE,
-                                             LDPC, TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_20,
+            N_MODE,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_mode = combination[2]
             n_capabilities = combination[3:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': n_mode,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities,
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": n_mode,
+                        "security": SECURITY_OPEN,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_24_HT40_lower_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_OPEN,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_24_HT40_upper_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_UPPER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_OPEN,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_5_HT20_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_5, CHANNEL_BANDWIDTH_20,
-                                             LDPC, TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_20,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_OPEN,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_5_HT40_lower_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_OPEN,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_5_HT40_upper_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_UPPER,
-                                             N_MODE, LDPC, TX_STBC, RX_STBC,
-                                             SGI_20, SGI_40, MAX_AMPDU_7935,
-                                             SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            N_MODE,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_mode = combination[2]
             n_capabilities = combination[3:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': n_mode,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": n_mode,
+                        "security": SECURITY_OPEN,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_24_HT20_wpa2_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_20, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_20,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_WPA2,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_24_HT40_lower_wpa2_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_WPA2,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_24_HT40_upper_wpa2_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_UPPER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_WPA2,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_5_HT20_wpa2_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_5, CHANNEL_BANDWIDTH_20,
-                                             LDPC, TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_20,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_WPA2,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_5_HT40_lower_wpa2_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_WPA2,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
 
     def _generate_5_HT40_upper_wpa2_test_args(self):
         test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_UPPER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
             test_frequency = combination[0]
             test_chbw = combination[1]
             n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
+            test_args.append(
+                (
+                    {
+                        "frequency": test_frequency,
+                        "chbw": test_chbw,
+                        "n_mode": hostapd_constants.MODE_11N_MIXED,
+                        "security": SECURITY_WPA2,
+                        "n_capabilities": n_capabilities,
+                    },
+                )
+            )
         return test_args
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py b/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py
index dc2116e..3cadf83 100644
--- a/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py
+++ b/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py
@@ -14,16 +14,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import asserts
 from antlion import utils
-
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
+
+from mobly import asserts, test_runner
 
 
-class WlanPhyComplianceABGTest(WifiBaseTest):
+class WlanPhyComplianceABGTest(base_test.WifiBaseTest):
     """Tests for validating 11a, 11b, and 11g PHYS.
 
     Test Bed Requirement:
@@ -33,76 +33,76 @@
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
         self.access_point = self.access_points[0]
         open_network = self.get_open_network(False, [])
         open_network_min_len = self.get_open_network(
-            False, [],
+            False,
+            [],
             ssid_length_2g=hostapd_constants.AP_SSID_MIN_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_MIN_LENGTH_5G)
+            ssid_length_5g=hostapd_constants.AP_SSID_MIN_LENGTH_5G,
+        )
         open_network_max_len = self.get_open_network(
-            False, [],
+            False,
+            [],
             ssid_length_2g=hostapd_constants.AP_SSID_MAX_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_MAX_LENGTH_5G)
-        self.open_network_2g = open_network['2g']
-        self.open_network_5g = open_network['5g']
-        self.open_network_max_len_2g = open_network_max_len['2g']
-        self.open_network_max_len_2g['SSID'] = (
-            self.open_network_max_len_2g['SSID'][3:])
-        self.open_network_max_len_5g = open_network_max_len['5g']
-        self.open_network_max_len_5g['SSID'] = (
-            self.open_network_max_len_5g['SSID'][3:])
-        self.open_network_min_len_2g = open_network_min_len['2g']
-        self.open_network_min_len_2g['SSID'] = (
-            self.open_network_min_len_2g['SSID'][3:])
-        self.open_network_min_len_5g = open_network_min_len['5g']
-        self.open_network_min_len_5g['SSID'] = (
-            self.open_network_min_len_5g['SSID'][3:])
+            ssid_length_5g=hostapd_constants.AP_SSID_MAX_LENGTH_5G,
+        )
+        self.open_network_2g = open_network["2g"]
+        self.open_network_5g = open_network["5g"]
+        self.open_network_max_len_2g = open_network_max_len["2g"]
+        self.open_network_max_len_2g["SSID"] = self.open_network_max_len_2g["SSID"][3:]
+        self.open_network_max_len_5g = open_network_max_len["5g"]
+        self.open_network_max_len_5g["SSID"] = self.open_network_max_len_5g["SSID"][3:]
+        self.open_network_min_len_2g = open_network_min_len["2g"]
+        self.open_network_min_len_2g["SSID"] = self.open_network_min_len_2g["SSID"][3:]
+        self.open_network_min_len_5g = open_network_min_len["5g"]
+        self.open_network_min_len_5g["SSID"] = self.open_network_min_len_5g["SSID"][3:]
 
-        self.utf8_ssid_2g = '2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢'
-        self.utf8_ssid_5g = '5𝔤_𝔊𝔬𝔬𝔤𝔩𝔢'
+        self.utf8_ssid_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
+        self.utf8_ssid_5g = "5𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
 
-        self.utf8_ssid_2g_french = 'Château du Feÿ'
-        self.utf8_password_2g_french = 'du Feÿ Château'
+        self.utf8_ssid_2g_french = "Château du Feÿ"
+        self.utf8_password_2g_french = "du Feÿ Château"
 
-        self.utf8_ssid_2g_german = 'Rat für Straßenatlas'
-        self.utf8_password_2g_german = 'für Straßenatlas Rat'
+        self.utf8_ssid_2g_german = "Rat für Straßenatlas"
+        self.utf8_password_2g_german = "für Straßenatlas Rat"
 
-        self.utf8_ssid_2g_dutch = 'Die niet óúd, is níéuw!'
-        self.utf8_password_2g_dutch = 'niet óúd, is níéuw! Die'
+        self.utf8_ssid_2g_dutch = "Die niet óúd, is níéuw!"
+        self.utf8_password_2g_dutch = "niet óúd, is níéuw! Die"
 
-        self.utf8_ssid_2g_swedish = 'Det är femtioåtta'
-        self.utf8_password_2g_swedish = 'femtioåtta Det är'
+        self.utf8_ssid_2g_swedish = "Det är femtioåtta"
+        self.utf8_password_2g_swedish = "femtioåtta Det är"
 
-        self.utf8_ssid_2g_norwegian = 'Curaçao ØÆ æ å å å'
-        self.utf8_password_2g_norwegian = 'ØÆ Curaçao æ å å å'
+        self.utf8_ssid_2g_norwegian = "Curaçao ØÆ æ å å å"
+        self.utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å"
 
-        #Danish and Norwegian has the same alphabet
+        # Danish and Norwegian have the same alphabet
         self.utf8_ssid_2g_danish = self.utf8_ssid_2g_norwegian
         self.utf8_password_2g_danish = self.utf8_password_2g_norwegian
 
-        self.utf8_ssid_2g_japanese = 'あなた はお母さん'
-        self.utf8_password_2g_japanese = 'そっくりね。あな'
+        self.utf8_ssid_2g_japanese = "あなた はお母さん"
+        self.utf8_password_2g_japanese = "そっくりね。あな"
 
-        self.utf8_ssid_2g_spanish = '¡No á,é,í,ó,ú,ü,ñ,¿,¡'
-        self.utf8_password_2g_spanish = 'á,é,í,ó,ú,ü,ñ,¿,¡ ¡No'
+        self.utf8_ssid_2g_spanish = "¡No á,é,í,ó,ú,ü,ñ,¿,¡"
+        self.utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No"
 
-        self.utf8_ssid_2g_italian = 'caffè Pinocchio è italiano?'
-        self.utf8_password_2g_italian = 'Pinocchio è italiano? caffè'
+        self.utf8_ssid_2g_italian = "caffè Pinocchio è italiano?"
+        self.utf8_password_2g_italian = "Pinocchio è italiano? caffè"
 
-        self.utf8_ssid_2g_korean = 'ㅘㅙㅚㅛㅜㅝㅞㅟㅠ'
-        self.utf8_password_2g_korean = 'ㅜㅝㅞㅟㅠㅘㅙㅚㅛ'
+        self.utf8_ssid_2g_korean = "ㅘㅙㅚㅛㅜㅝㅞㅟㅠ"
+        self.utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ"
 
         self.access_point.stop_all_aps()
 
@@ -129,1370 +129,1828 @@
         self.access_point.stop_all_aps()
 
     def test_associate_11b_only_long_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=False)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=False,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_short_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_minimal_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=15)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=15,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_maximum_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=1024)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=1024,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_frag_threshold_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_rts_threshold_256(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_rts_256_frag_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_high_dtim_low_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_low_dtim_high_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_with_default_values(self):
         setup_ap(
             access_point=self.access_point,
-            profile_name='whirlwind_11ab_legacy',
+            profile_name="whirlwind_11ab_legacy",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g['SSID'],
+            ssid=self.open_network_2g["SSID"],
             force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_with_non_default_values(self):
         setup_ap(
             access_point=self.access_point,
-            profile_name='whirlwind_11ab_legacy',
+            profile_name="whirlwind_11ab_legacy",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g['SSID'],
+            ssid=self.open_network_2g["SSID"],
             force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_BK(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_BK
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_BE(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_BE
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_VI(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VI(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_BK_VI_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_WMM_ACM_on_BE_VI_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_country_code(self):
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["UNITED_STATES"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_non_country_code(self):
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["NON_COUNTRY"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_hidden_ssid(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 hidden=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            hidden=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_vendor_ie_in_beacon_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_vendor_ie_in_beacon_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_vendor_ie_in_assoc_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_association_response'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_association_response"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11b_only_with_vendor_ie_in_assoc_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.VENDOR_IE[
-                     'zero_length_association_'
-                     'response_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_association_response_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_long_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 preamble=False)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            preamble=False,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_short_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 preamble=True)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            preamble=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_minimal_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 beacon_interval=15)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            beacon_interval=15,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_maximum_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 beacon_interval=1024)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            beacon_interval=1024,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_frag_threshold_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_rts_threshold_256(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 rts_threshold=256)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            rts_threshold=256,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_rts_256_frag_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_high_dtim_low_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_low_dtim_high_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_with_default_values(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_with_non_default_values(self):
         setup_ap(
             access_point=self.access_point,
-            profile_name='whirlwind_11ab_legacy',
+            profile_name="whirlwind_11ab_legacy",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.open_network_5g['SSID'],
+            ssid=self.open_network_5g["SSID"],
             force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_BK(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_BE(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_VI(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_VI,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VI(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_BK_VI_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_WMM_ACM_on_BE_VI_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_country_code(self):
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["UNITED_STATES"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_non_country_code(self):
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["NON_COUNTRY"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_hidden_ssid(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 hidden=True)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            hidden=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_vendor_ie_in_beacon_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_vendor_ie_in_beacon_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_vendor_ie_in_assoc_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_association_response'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_association_response"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11a_only_with_vendor_ie_in_assoc_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.VENDOR_IE[
-                     'zero_length_association_'
-                     'response_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_association_response_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_long_preamble(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=False,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=False,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_short_preamble(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_minimal_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=15,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=15,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_maximum_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=1024,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=1024,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_frag_threshold_430(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 frag_threshold=430,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            frag_threshold=430,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_rts_threshold_256(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_rts_256_frag_430(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_high_dtim_low_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_low_dtim_high_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_with_default_values(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_with_non_default_values(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_BK(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_BE(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_VI(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_VI,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_VO,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VI(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VO,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_BK_VI_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_WMM_ACM_on_BE_VI_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_country_code(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'], data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["UNITED_STATES"],
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_non_country_code(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'], data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["NON_COUNTRY"],
+            data_rates,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_hidden_ssid(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 hidden=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        data_rates = utils.merge_dicts(
+            hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            hidden=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_vendor_ie_in_beacon_correct_length(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_beacon'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.VENDOR_IE["correct_length_beacon"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_vendor_ie_in_beacon_zero_length(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['zero_length_beacon_without_data'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.VENDOR_IE["zero_length_beacon_without_data"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.VENDOR_IE["correct_length_association_response"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'],
-            hostapd_constants.VENDOR_IE['zero_length_association_'
-                                        'response_without_data'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.VENDOR_IE["correct_length_association_response"],
+            hostapd_constants.VENDOR_IE[
+                "zero_length_association_response_without_data"
+            ],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_only_long_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=False)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=False,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_short_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_minimal_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=15)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=15,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_maximum_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=1024)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=1024,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_frag_threshold_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_rts_threshold_256(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_rts_256_frag_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_high_dtim_low_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_low_dtim_high_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_with_default_values(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_with_non_default_values(self):
         setup_ap(
             access_point=self.access_point,
-            profile_name='whirlwind_11ag_legacy',
+            profile_name="whirlwind_11ag_legacy",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g['SSID'],
+            ssid=self.open_network_2g["SSID"],
             force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_BK(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_BE(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_VI(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_VI,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_BK_BE_VI(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_BK_BE_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_BK_VI_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BK,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_WMM_ACM_on_BE_VI_VO(self):
         wmm_acm_bits_enabled = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.WMM_ACM_BE,
+            hostapd_constants.WMM_ACM_VI,
+            hostapd_constants.WMM_ACM_VO,
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_country_code(self):
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["UNITED_STATES"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_non_country_code(self):
         country_info = utils.merge_dicts(
             hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.COUNTRY_STRING["ALL"],
+            hostapd_constants.COUNTRY_CODE["NON_COUNTRY"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_only_with_hidden_ssid(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 hidden=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            hidden=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.VENDOR_IE["correct_length_association_response"],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self):
         data_rates = utils.merge_dicts(
             hostapd_constants.OFDM_DATA_RATES,
             hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'],
-            hostapd_constants.VENDOR_IE['zero_length_association_'
-                                        'response_without_data'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
+            hostapd_constants.VENDOR_IE["correct_length_association_response"],
+            hostapd_constants.VENDOR_IE[
+                "zero_length_association_response_without_data"
+            ],
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
 
     def test_minimum_ssid_length_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_min_len_2g['SSID'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_min_len_2g["SSID"],
+        )
         asserts.assert_true(
-            self.dut.associate(self.open_network_min_len_2g['SSID']),
-            'Failed to associate.')
+            self.dut.associate(self.open_network_min_len_2g["SSID"]),
+            "Failed to associate.",
+        )
 
     def test_minimum_ssid_length_5g_11ac_80mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_min_len_5g['SSID'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_min_len_5g["SSID"],
+        )
         asserts.assert_true(
-            self.dut.associate(self.open_network_min_len_5g['SSID']),
-            'Failed to associate.')
+            self.dut.associate(self.open_network_min_len_5g["SSID"]),
+            "Failed to associate.",
+        )
 
     def test_maximum_ssid_length_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_max_len_2g['SSID'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_max_len_2g["SSID"],
+        )
         asserts.assert_true(
-            self.dut.associate(self.open_network_max_len_2g['SSID']),
-            'Failed to associate.')
+            self.dut.associate(self.open_network_max_len_2g["SSID"]),
+            "Failed to associate.",
+        )
 
     def test_maximum_ssid_length_5g_11ac_80mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_max_len_5g['SSID'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_max_len_5g["SSID"],
+        )
         asserts.assert_true(
-            self.dut.associate(self.open_network_max_len_5g['SSID']),
-            'Failed to associate.')
+            self.dut.associate(self.open_network_max_len_5g["SSID"]),
+            "Failed to associate.",
+        )
 
     def test_ssid_with_UTF8_characters_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_5g_11ac_80mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.utf8_ssid_5g)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_5g),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.utf8_ssid_5g,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_5g), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_french_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_french)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_french),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_french,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_french), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_german_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_german)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_german),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_german,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_german), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_dutch_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_dutch)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_dutch),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_dutch,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_dutch), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_swedish_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_swedish)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_swedish),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_swedish,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_swedish), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_norwegian_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_norwegian)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_norwegian),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_norwegian,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_norwegian), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_danish_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_danish)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_danish),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_danish,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_danish), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_japanese_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_japanese)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_japanese),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_japanese,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_japanese), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_spanish_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_spanish)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_spanish),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_spanish,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_spanish), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_italian_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_italian)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_italian),
-                            'Failed to associate.')
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_italian,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_italian), "Failed to associate."
+        )
 
     def test_ssid_with_UTF8_characters_korean_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_korean)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_korean,
+        )
 
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_korean),
-                            'Failed to associate.')
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_korean), "Failed to associate."
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py b/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
index ade2036..98f1903 100644
--- a/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
+++ b/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
@@ -14,19 +14,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import re
-
-from antlion import asserts
-from antlion import utils
 from functools import wraps
 
+from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 
-AP_11ABG_PROFILE_NAME = 'whirlwind_11ag_legacy'
+from mobly import asserts, test_runner
+
+AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy"
 SSID_LENGTH_DEFAULT = 15
 
 
@@ -57,97 +57,102 @@
         Returns:
             The original function that was called
         """
-        utf8_password_2g = '2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢'
-        utf8_password_2g_french = 'du Feÿ Château'
-        utf8_password_2g_german = 'für Straßenatlas Rat'
-        utf8_password_2g_dutch = 'niet óúd, is níéuw! Die'
-        utf8_password_2g_swedish = 'femtioåtta Det är'
-        utf8_password_2g_norwegian = 'ØÆ Curaçao æ å å å'
-        #Danish and Norwegian has the same alphabet
+        utf8_password_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
+        utf8_password_2g_french = "du Feÿ Château"
+        utf8_password_2g_german = "für Straßenatlas Rat"
+        utf8_password_2g_dutch = "niet óúd, is níéuw! Die"
+        utf8_password_2g_swedish = "femtioåtta Det är"
+        utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å"
+        # Danish and Norwegian have the same alphabet
         utf8_password_2g_danish = utf8_password_2g_norwegian
-        utf8_password_2g_japanese = 'そっくりね。あな'
-        utf8_password_2g_spanish = 'á,é,í,ó,ú,ü,ñ,¿,¡ ¡No'
-        utf8_password_2g_italian = 'Pinocchio è italiano? caffè'
-        utf8_password_2g_korean = 'ㅜㅝㅞㅟㅠㅘㅙㅚㅛ'
+        utf8_password_2g_japanese = "そっくりね。あな"
+        utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No"
+        utf8_password_2g_italian = "Pinocchio è italiano? caffè"
+        utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ"
 
-        security = re.search(r'sec(.*?)ptk_(.*)', test_func.__name__)
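+        # e.g. "test_associate_11a_sec_wpa_psk_ptk_tkip" yields
+        # group(1) == "_wpa_psk_" (security mode) and group(2) == "tkip" (cipher).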
+        security = re.search(r"sec(.*?)ptk_(.*)", test_func.__name__)
         security_mode = security.group(1)
         ptk_type = security.group(2)
         wpa_cipher = None
         wpa2_cipher = None
-        if '_wpa_wpa2_wpa3_' in security_mode:
+        if "_wpa_wpa2_wpa3_" in security_mode:
             security_mode = hostapd_constants.WPA_WPA2_WPA3_MIXED_STRING
-        elif '_wpa_wpa2_' in security_mode:
+        elif "_wpa_wpa2_" in security_mode:
             security_mode = hostapd_constants.WPA_MIXED_STRING
-        elif '_wpa2_wpa3_' in security_mode:
+        elif "_wpa2_wpa3_" in security_mode:
             security_mode = hostapd_constants.WPA2_WPA3_MIXED_STRING
-        elif '_wep_' in security_mode:
+        elif "_wep_" in security_mode:
             security_mode = hostapd_constants.WEP_STRING
-        elif '_wpa_' in security_mode:
+        elif "_wpa_" in security_mode:
             security_mode = hostapd_constants.WPA_STRING
-        elif '_wpa2_' in security_mode:
+        elif "_wpa2_" in security_mode:
             security_mode = hostapd_constants.WPA2_STRING
-        elif '_wpa3_' in security_mode:
+        elif "_wpa3_" in security_mode:
             security_mode = hostapd_constants.WPA3_STRING
-        if 'tkip' in ptk_type and 'ccmp' in ptk_type:
-            wpa_cipher = 'TKIP CCMP'
-            wpa2_cipher = 'TKIP CCMP'
-        elif 'tkip' in ptk_type:
-            wpa_cipher = 'TKIP'
-            wpa2_cipher = 'TKIP'
-        elif 'ccmp' in ptk_type:
-            wpa_cipher = 'CCMP'
-            wpa2_cipher = 'CCMP'
-        if 'max_length_password' in test_func.__name__:
+        if "tkip" in ptk_type and "ccmp" in ptk_type:
+            wpa_cipher = "TKIP CCMP"
+            wpa2_cipher = "TKIP CCMP"
+        elif "tkip" in ptk_type:
+            wpa_cipher = "TKIP"
+            wpa2_cipher = "TKIP"
+        elif "ccmp" in ptk_type:
+            wpa_cipher = "CCMP"
+            wpa2_cipher = "CCMP"
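+        # The password/PSK variant is likewise derived from the test name.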
+        if "max_length_password" in test_func.__name__:
             password = generate_random_password(
-                length=hostapd_constants.MAX_WPA_PASSWORD_LENGTH)
-        elif 'max_length_psk' in test_func.__name__:
+                length=hostapd_constants.MAX_WPA_PASSWORD_LENGTH
+            )
+        elif "max_length_psk" in test_func.__name__:
             password = str(
                 generate_random_password(
-                    length=hostapd_constants.MAX_WPA_PSK_LENGTH,
-                    hex=True)).lower()
-        elif 'wep_5_chars' in test_func.__name__:
+                    length=hostapd_constants.MAX_WPA_PSK_LENGTH, hex=True
+                )
+            ).lower()
+        elif "wep_5_chars" in test_func.__name__:
             password = generate_random_password(length=5)
-        elif 'wep_13_chars' in test_func.__name__:
+        elif "wep_13_chars" in test_func.__name__:
             password = generate_random_password(length=13)
-        elif 'wep_10_hex' in test_func.__name__:
-            password = str(generate_random_password(length=10,
-                                                    hex=True)).lower()
-        elif 'wep_26_hex' in test_func.__name__:
-            password = str(generate_random_password(length=26,
-                                                    hex=True)).lower()
-        elif 'utf8' in test_func.__name__:
-            if 'french' in test_func.__name__:
+        elif "wep_10_hex" in test_func.__name__:
+            password = str(generate_random_password(length=10, hex=True)).lower()
+        elif "wep_26_hex" in test_func.__name__:
+            password = str(generate_random_password(length=26, hex=True)).lower()
+        elif "utf8" in test_func.__name__:
+            if "french" in test_func.__name__:
                 password = utf8_password_2g_french
-            elif 'german' in test_func.__name__:
+            elif "german" in test_func.__name__:
                 password = utf8_password_2g_german
-            elif 'dutch' in test_func.__name__:
+            elif "dutch" in test_func.__name__:
                 password = utf8_password_2g_dutch
-            elif 'swedish' in test_func.__name__:
+            elif "swedish" in test_func.__name__:
                 password = utf8_password_2g_swedish
-            elif 'norwegian' in test_func.__name__:
+            elif "norwegian" in test_func.__name__:
                 password = utf8_password_2g_norwegian
-            elif 'danish' in test_func.__name__:
+            elif "danish" in test_func.__name__:
                 password = utf8_password_2g_danish
-            elif 'japanese' in test_func.__name__:
+            elif "japanese" in test_func.__name__:
                 password = utf8_password_2g_japanese
-            elif 'spanish' in test_func.__name__:
+            elif "spanish" in test_func.__name__:
                 password = utf8_password_2g_spanish
-            elif 'italian' in test_func.__name__:
+            elif "italian" in test_func.__name__:
                 password = utf8_password_2g_italian
-            elif 'korean' in test_func.__name__:
+            elif "korean" in test_func.__name__:
                 password = utf8_password_2g_korean
             else:
                 password = utf8_password_2g
         else:
             password = generate_random_password()
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            security_mode, None)
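+        # Map the hostapd security mode string to the DUT's expected target
+        # security; None if the mode has no default mapping.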
+        target_security = (
+            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                security_mode, None
+            )
+        )
 
-        self.security_profile = Security(security_mode=security_mode,
-                                         password=password,
-                                         wpa_cipher=wpa_cipher,
-                                         wpa2_cipher=wpa2_cipher)
+        self.security_profile = Security(
+            security_mode=security_mode,
+            password=password,
+            wpa_cipher=wpa_cipher,
+            wpa2_cipher=wpa2_cipher,
+        )
         self.client_password = password
         self.target_security = target_security
         self.ssid = utils.rand_ascii_str(SSID_LENGTH_DEFAULT)
@@ -156,7 +161,7 @@
     return security_profile_generator
 
 
-class WlanSecurityComplianceABGTest(WifiBaseTest):
+class WlanSecurityComplianceABGTest(base_test.WifiBaseTest):
     """Tests for validating 11a, 11b, and 11g PHYS.
 
     Test Bed Requirement:
@@ -166,17 +171,17 @@
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
         self.access_point = self.access_points[0]
 
@@ -210,823 +215,1061 @@
 
     @create_security_profile
     def test_associate_11a_sec_open_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_open_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_open_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_open_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_shared_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_shared_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_shared_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_shared_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -1035,409 +1278,522 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
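+
+    # The PMF tests below that force TKIP (the *_pmf_*_ptk_tkip variants) use
+    # assert_false: Fuchsia does not support TKIP together with PMF, so
+    # association is expected to fail for those cases.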
 
     @create_security_profile
     def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -1447,379 +1803,492 @@
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
             pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -1828,1871 +2297,2417 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_open_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_open_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_open_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_open_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_shared_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_shared_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_shared_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_shared_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -3701,375 +4716,489 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -4078,411 +5207,522 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -4492,644 +5732,834 @@
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
             pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -5138,264 +6568,344 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -5404,275 +6914,357 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -5682,267 +7274,349 @@
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
             pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -5951,278 +7625,361 @@
             force_wmm=True,
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+    def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
+        self,
+    ):
         setup_ap(
             access_point=self.access_point,
             profile_name=AP_11ABG_PROFILE_NAME,
@@ -6232,244 +7989,325 @@
             additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
             security=self.security_profile,
             pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            password=self.client_password,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_french_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_german_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_dutch_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_swedish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
-    def test_associate_utf8_norwegian_password_11bg_sec_wpa2_psk_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+    def test_associate_utf8_norwegian_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_danish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_japanese_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_spanish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_italian_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
 
     @create_security_profile
     def test_associate_utf8_korean_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            password=self.client_password,
+            force_wmm=False,
+        )
 
         asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+
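+# Allow running this test module directly with Mobly's test runner.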
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/BUILD.gn b/src/antlion/tests/wlan/facade/BUILD.gn
new file mode 100644
index 0000000..c62ce70
--- /dev/null
+++ b/src/antlion/tests/wlan/facade/BUILD.gn
@@ -0,0 +1,32 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("wlan_deprecated_configuration_test") {
+  main_source = "WlanDeprecatedConfigurationTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("wlan_facade_test") {
+  main_source = "WlanFacadeTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("wlan_status_test") {
+  main_source = "WlanStatusTest.py"
+  environments = display_envs
+}
+
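+# Aggregate the facade tests into a single target for the e2e suites.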
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":wlan_deprecated_configuration_test($host_toolchain)",
+    ":wlan_facade_test($host_toolchain)",
+    ":wlan_status_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py b/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
index 843d18f..7fee369 100644
--- a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
+++ b/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
@@ -14,22 +14,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import asserts
 from antlion import utils
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
-AP_ROLE = 'Ap'
-DEFAULT_SSID = 'testssid'
-DEFAULT_SECURITY = 'none'
-DEFAULT_PASSWORD = ''
-DEFAULT_CONNECTIVITY_MODE = 'local_only'
-DEFAULT_OPERATING_BAND = 'any'
-TEST_MAC_ADDR = '12:34:56:78:9a:bc'
-TEST_MAC_ADDR_SECONDARY = 'bc:9a:78:56:34:12'
+from mobly import asserts, test_runner
+
+AP_ROLE = "Ap"
+DEFAULT_SSID = "testssid"
+DEFAULT_SECURITY = "none"
+DEFAULT_PASSWORD = ""
+DEFAULT_CONNECTIVITY_MODE = "local_only"
+DEFAULT_OPERATING_BAND = "any"
+TEST_MAC_ADDR = "12:34:56:78:9a:bc"
+TEST_MAC_ADDR_SECONDARY = "bc:9a:78:56:34:12"
 
 
-class WlanDeprecatedConfigurationTest(WifiBaseTest):
+class WlanDeprecatedConfigurationTest(base_test.WifiBaseTest):
     """Tests for WlanDeprecatedConfigurationFacade"""
 
     def setup_class(self):
@@ -53,28 +54,31 @@
             AttributeError, if no interface has role 'Ap'
         """
         wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if wlan_ifaces.get('error'):
-            raise ConnectionError('Failed to get wlan interface IDs: %s' %
-                                  wlan_ifaces['error'])
+        if wlan_ifaces.get("error"):
+            raise ConnectionError(
+                "Failed to get wlan interface IDs: %s" % wlan_ifaces["error"]
+            )
 
-        for wlan_iface in wlan_ifaces['result']:
-            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(
-                wlan_iface)
-            if iface_info.get('error'):
-                raise ConnectionError('Failed to query wlan iface: %s' %
-                                      iface_info['error'])
+        for wlan_iface in wlan_ifaces["result"]:
+            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(wlan_iface)
+            if iface_info.get("error"):
+                raise ConnectionError(
+                    "Failed to query wlan iface: %s" % iface_info["error"]
+                )
 
-            if iface_info['result']['role'] == AP_ROLE:
-                if 'mac_addr' in iface_info['result']:
+            if iface_info["result"]["role"] == AP_ROLE:
+                if "mac_addr" in iface_info["result"]:
                     return utils.mac_address_list_to_str(
-                            iface_info['result']['mac_addr'])
-                elif 'sta_addr' in iface_info['result']:
+                        iface_info["result"]["mac_addr"]
+                    )
+                elif "sta_addr" in iface_info["result"]:
                     return utils.mac_address_list_to_str(
-                            iface_info['result']['sta_addr'])
-                raise AttributeError(
-                    'AP iface info does not contain MAC address.')
+                        iface_info["result"]["sta_addr"]
+                    )
+                raise AttributeError("AP iface info does not contain MAC address.")
         raise AttributeError(
-            'Failed to get ap interface mac address. No AP interface found.')
+            "Failed to get ap interface mac address. No AP interface found."
+        )
 
     def _start_soft_ap(self):
         """Starts SoftAP on DUT.
@@ -82,14 +86,16 @@
         Raises:
             ConnectionError, if SL4F call fails.
         """
-        self.log.info('Starting SoftAP on Fuchsia device (%s).' %
-                      self.dut.device.ip)
+        self.log.info("Starting SoftAP on Fuchsia device (%s)." % self.dut.device.ip)
         response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            DEFAULT_SSID, DEFAULT_SECURITY, DEFAULT_PASSWORD,
-            DEFAULT_CONNECTIVITY_MODE, DEFAULT_OPERATING_BAND)
-        if response.get('error'):
-            raise ConnectionError('Failed to setup SoftAP: %s' %
-                                  response['error'])
+            DEFAULT_SSID,
+            DEFAULT_SECURITY,
+            DEFAULT_PASSWORD,
+            DEFAULT_CONNECTIVITY_MODE,
+            DEFAULT_OPERATING_BAND,
+        )
+        if response.get("error"):
+            raise ConnectionError("Failed to setup SoftAP: %s" % response["error"])
 
     def _stop_soft_aps(self):
         """Stops SoftAP on DUT.
@@ -97,12 +103,10 @@
         Raises:
             ConnectionError, if SL4F call fails.
         """
-        self.log.info('Stopping SoftAP.')
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint(
-        )
-        if response.get('error'):
-            raise ConnectionError('Failed to stop SoftAP: %s' %
-                                  response['error'])
+        self.log.info("Stopping SoftAP.")
+        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
+        if response.get("error"):
+            raise ConnectionError("Failed to stop SoftAP: %s" % response["error"])
 
     def _suggest_ap_mac_addr(self, mac_addr):
         """Suggests mac address for AP interface.
@@ -113,16 +117,20 @@
             TestFailure, if SL4F call fails.
         """
         self.log.info(
-            'Suggesting AP mac addr (%s) via wlan_deprecated_configuration_lib.'
-            % mac_addr)
-        response = (self.dut.device.sl4f.wlan_deprecated_configuration_lib.
-                    wlanSuggestAccessPointMacAddress(mac_addr))
-        if response.get('error'):
-            asserts.fail('Failed to suggest AP mac address (%s): %s' %
-                         (mac_addr, response['error']))
+            "Suggesting AP mac addr (%s) via wlan_deprecated_configuration_lib."
+            % mac_addr
+        )
+        response = self.dut.device.sl4f.wlan_deprecated_configuration_lib.wlanSuggestAccessPointMacAddress(
+            mac_addr
+        )
+        if response.get("error"):
+            asserts.fail(
+                "Failed to suggest AP mac address (%s): %s"
+                % (mac_addr, response["error"])
+            )
 
     def _verify_mac_addr(self, expected_addr):
-        """ Verifies mac address of ap interface is set to expected mac address.
+        """Verifies mac address of ap interface is set to expected mac address.
 
         Args:
@@ -134,12 +142,12 @@
         set_mac_addr = self._get_ap_interface_mac_address()
         if set_mac_addr != expected_addr:
             asserts.fail(
-                'Failed to set AP mac address '
-                'via wlan_deprecated_configuration_lib. Expected mac addr: %s,'
-                ' Actual mac addr: %s' % (expected_addr, set_mac_addr))
+                "Failed to set AP mac address "
+                "via wlan_deprecated_configuration_lib. Expected mac addr: %s,"
+                " Actual mac addr: %s" % (expected_addr, set_mac_addr)
+            )
         else:
-            self.log.info('AP mac address successfully set to %s' %
-                          expected_addr)
+            self.log.info("AP mac address successfully set to %s" % expected_addr)
 
     def test_suggest_ap_mac_address(self):
         """Tests suggest ap mac address SL4F call
@@ -159,9 +167,9 @@
         # Retrieve initial ap mac address
         self._start_soft_ap()
 
-        self.log.info('Getting initial mac address.')
+        self.log.info("Getting initial mac address.")
         initial_mac_addr = self._get_ap_interface_mac_address()
-        self.log.info('Initial mac address: %s' % initial_mac_addr)
+        self.log.info("Initial mac address: %s" % initial_mac_addr)
 
         if initial_mac_addr != TEST_MAC_ADDR:
             suggested_mac_addr = TEST_MAC_ADDR
@@ -180,10 +188,13 @@
         self._stop_soft_aps()
 
         # Reset to initial mac address and verify
-        self.log.info('Resetting to initial mac address (%s).' %
-                      initial_mac_addr)
+        self.log.info("Resetting to initial mac address (%s)." % initial_mac_addr)
         self._suggest_ap_mac_addr(initial_mac_addr)
 
         self._start_soft_ap()
 
         self._verify_mac_addr(initial_mac_addr)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/WlanFacadeTest.py b/src/antlion/tests/wlan/facade/WlanFacadeTest.py
index 5a1d133..fdddf69 100644
--- a/src/antlion/tests/wlan/facade/WlanFacadeTest.py
+++ b/src/antlion/tests/wlan/facade/WlanFacadeTest.py
@@ -19,59 +19,63 @@
 """
 import array
 
-from antlion import asserts, signals
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import asserts, signals, test_runner
 
-class WlanFacadeTest(WifiBaseTest):
 
+class WlanFacadeTest(base_test.WifiBaseTest):
     def setup_class(self):
         super().setup_class()
         if len(self.fuchsia_devices) < 1:
             raise signals.TestAbortClass(
                 "Sorry, please try verifying FuchsiaDevice is in your "
-                "config file and try again.")
+                "config file and try again."
+            )
         self.dut = create_wlan_device(self.fuchsia_devices[0])
 
     def test_get_phy_id_list(self):
         result = self.dut.device.sl4f.wlan_lib.wlanPhyIdList()
-        error = result['error']
+        error = result["error"]
         asserts.assert_true(error is None, error)
 
-        self.log.info('Got Phy IDs %s' % result['result'])
+        self.log.info("Got Phy IDs %s" % result["result"])
         return True
 
     def test_get_country(self):
         wlan_lib = self.dut.device.sl4f.wlan_lib
 
         result = wlan_lib.wlanPhyIdList()
-        error = result['error']
+        error = result["error"]
         asserts.assert_true(error is None, error)
-        phy_id = result['result'][0]
+        phy_id = result["result"][0]
 
         result = wlan_lib.wlanGetCountry(phy_id)
-        error = result['error']
+        error = result["error"]
         asserts.assert_true(error is None, error)
 
-        country_bytes = result['result']
-        country_string = str(array.array('b', country_bytes),
-                             encoding='us-ascii')
-        self.log.info('Got country %s (%s)', country_string, country_bytes)
+        country_bytes = result["result"]
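+        # Country code arrives as a list of signed bytes; decode to an ASCII string.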
+        country_string = str(array.array("b", country_bytes), encoding="us-ascii")
+        self.log.info("Got country %s (%s)", country_string, country_bytes)
         return True
 
     def test_get_dev_path(self):
         wlan_lib = self.dut.device.sl4f.wlan_lib
 
         result = wlan_lib.wlanPhyIdList()
-        error = result['error']
+        error = result["error"]
         asserts.assert_true(error is None, error)
-        phy_id = result['result'][0]
+        phy_id = result["result"][0]
 
         result = wlan_lib.wlanGetDevPath(phy_id)
-        error = result['error']
+        error = result["error"]
         asserts.assert_true(error is None, error)
 
-        dev_path = result['result']
-        self.log.info('Got device path: %s', dev_path)
+        dev_path = result["result"]
+        self.log.info("Got device path: %s", dev_path)
         return True
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/WlanStatusTest.py b/src/antlion/tests/wlan/facade/WlanStatusTest.py
index 4c35c1e..cf70b0d 100644
--- a/src/antlion/tests/wlan/facade/WlanStatusTest.py
+++ b/src/antlion/tests/wlan/facade/WlanStatusTest.py
@@ -17,11 +17,12 @@
 Test to verify that a DUT's client interface's status can be queried.
 """
 
-from antlion import signals
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
+
+from mobly import signals, test_runner
 
 
-class WlanStatusTest(WifiBaseTest):
+class WlanStatusTest(base_test.WifiBaseTest):
     """WLAN status test class.
 
     Test Bed Requirements:
@@ -31,14 +32,16 @@
     def setup_class(self):
         super().setup_class()
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
 
     def on_fail(self, test_name, begin_time):
         for fd in self.fuchsia_devices:
             super().on_device_fail(fd, test_name, begin_time)
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
 
     def test_wlan_stopped_client_status(self):
         """Queries WLAN status on DUTs with no WLAN ifaces.
@@ -52,8 +55,7 @@
             status = fd.sl4f.wlan_lib.wlanStatus()
             self.log.debug(status)
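+            # Without a client interface, expect an error and an empty result.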
             if not status["error"] or status["result"]:
-                raise signals.TestFailure(
-                    "DUT's WLAN client status should be empty")
+                raise signals.TestFailure("DUT's WLAN client status should be empty")
 
         raise signals.TestPass("Success")
 
@@ -65,13 +67,19 @@
         status.
         """
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
 
             status = fd.sl4f.wlan_lib.wlanStatus()
             self.log.debug(status)
             if status["error"] or not status["result"]:
                 raise signals.TestFailure(
-                    "DUT's WLAN client status should be populated")
+                    "DUT's WLAN client status should be populated"
+                )
 
         raise signals.TestPass("Success")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/BUILD.gn b/src/antlion/tests/wlan/functional/BUILD.gn
new file mode 100644
index 0000000..7171f0a
--- /dev/null
+++ b/src/antlion/tests/wlan/functional/BUILD.gn
@@ -0,0 +1,97 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("beacon_loss_test") {
+  main_source = "BeaconLossTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("channel_switch_test") {
+  main_source = "ChannelSwitchTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("connection_stress_test") {
+  main_source = "ConnectionStressTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("download_stress_test") {
+  main_source = "DownloadStressTest.py"
+
+  # Requires external internet access. This is considered bad practice for an
+  # automated test due to reliance on external services. Will remain an at-desk
+  # test until rewritten to remove that dependence.
+  # environments = display_ap_envs
+  environments = []
+}
+
+antlion_host_test("ping_stress_test") {
+  main_source = "PingStressTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("soft_ap_test") {
+  main_source = "SoftApTest.py"
+
+  # Requires one Fuchsia device and one Android device. There are no
+  # infra-hosted environments to run this test on. Will likely remain an at-desk
+  # test for as long as it requires an Android device.
+  environments = []
+}
+
+antlion_host_test("wlan_reboot_test") {
+  main_source = "WlanRebootTest.py"
+  test_params = "wlan_reboot_test_params.yaml"
+  environments = display_ap_iperf_envs
+}
+
+antlion_host_test("wlan_scan_test") {
+  main_source = "WlanScanTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_target_security_test") {
+  main_source = "WlanTargetSecurityTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_wireless_network_management_test") {
+  main_source = "WlanWirelessNetworkManagementTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":beacon_loss_test($host_toolchain)",
+    ":channel_switch_test($host_toolchain)",
+    ":ping_stress_test($host_toolchain)",
+    ":wlan_reboot_test($host_toolchain)",
+    ":wlan_scan_test($host_toolchain)",
+    ":wlan_target_security_test($host_toolchain)",
+    ":wlan_wireless_network_management_test($host_toolchain)",
+  ]
+}
+
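+# Smaller subset of the e2e tests for quick validation runs.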
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":ping_stress_test($host_toolchain)",
+  ]
+}
+
+# Tests that are disabled in automation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    ":download_stress_test($host_toolchain)",
+    ":soft_ap_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/wlan/functional/BeaconLossTest.py b/src/antlion/tests/wlan/functional/BeaconLossTest.py
index 6c8b74a..60c6a78 100644
--- a/src/antlion/tests/wlan/functional/BeaconLossTest.py
+++ b/src/antlion/tests/wlan/functional/BeaconLossTest.py
@@ -24,18 +24,16 @@
 
 import time
 
-from antlion import asserts
-from antlion import signals
-from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
-
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
+from antlion.test_utils.wifi import base_test
 from antlion.utils import rand_ascii_str
 
+from mobly import asserts, test_runner
 
-class BeaconLossTest(WifiBaseTest):
+
+class BeaconLossTest(base_test.WifiBaseTest):
     # Default number of test iterations here.
     # Override using parameter in config file.
     # Eg: "beacon_loss_test_iterations": "10"
@@ -57,29 +55,29 @@
     def setup_class(self):
         super().setup_class()
         self.ssid = rand_ascii_str(10)
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
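+        # Determine the DUT type from the config, defaulting to a Fuchsia device.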
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                ' Expected "fuchsia_devices" or "android_devices".'
+            )
+
         self.access_point = self.access_points[0]
         self.num_of_iterations = int(
-            self.user_params.get("beacon_loss_test_iterations",
-                                 self.num_of_iterations))
+            self.user_params.get("beacon_loss_test_iterations", self.num_of_iterations)
+        )
         self.in_use_interface = None
 
     def teardown_test(self):
         self.dut.disconnect()
         self.dut.reset_wifi()
         # ensure radio is on, in case the test failed while the radio was off
-        self.access_point.iwconfig.ap_iwconfig(self.in_use_interface,
-                                               "txpower on")
+        self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower on")
         self.download_ap_logs()
         self.access_point.stop_all_aps()
 
@@ -88,10 +86,12 @@
         self.access_point.stop_all_aps()
 
     def beacon_loss(self, channel):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=channel,
-                 ssid=self.ssid)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=channel,
+            ssid=self.ssid,
+        )
         time.sleep(self.wait_ap_startup_s)
         if channel > 14:
             self.in_use_interface = self.access_point.wlan_5g
@@ -103,25 +103,22 @@
         self.log.info("sending associate command for ssid %s", self.ssid)
         self.dut.associate(target_ssid=self.ssid)
 
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
 
         time.sleep(self.wait_client_connection_setup_s)
 
         for _ in range(0, self.num_of_iterations):
             # Turn off AP radio
             self.log.info("turning off radio")
-            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface,
-                                                   "txpower off")
+            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower off")
             time.sleep(self.wait_after_ap_txoff_s)
 
             # Did we disconnect from AP?
-            asserts.assert_false(self.dut.is_connected(),
-                                 'Failed to disconnect.')
+            asserts.assert_false(self.dut.is_connected(), "Failed to disconnect.")
 
             # Turn on AP radio
             self.log.info("turning on radio")
-            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface,
-                                                   "txpower on")
+            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower on")
             time.sleep(self.wait_to_connect_after_ap_txon_s)
 
             # Tell the client to connect
@@ -130,8 +127,7 @@
             time.sleep(self.wait_client_connection_setup_s)
 
             # Did we connect back to WiFi?
-            asserts.assert_true(self.dut.is_connected(),
-                                'Failed to connect back.')
+            asserts.assert_true(self.dut.is_connected(), "Failed to connect back.")
 
         return True
 
@@ -140,3 +136,7 @@
 
     def test_beacon_loss_5g(self):
         self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py b/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
index 1c62919..b9a674c 100644
--- a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
+++ b/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
@@ -19,17 +19,18 @@
 
 import random
 import time
+from typing import Sequence
 
-from antlion import asserts
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.utils import rand_ascii_str
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from typing import Sequence
+
+from mobly import asserts, test_runner
 
 
-class ChannelSwitchTest(WifiBaseTest):
+class ChannelSwitchTest(base_test.WifiBaseTest):
     # Time to wait between issuing channel switches
     WAIT_BETWEEN_CHANNEL_SWITCHES_S = 15
 
@@ -46,17 +47,18 @@
     def setup_class(self) -> None:
         super().setup_class()
         self.ssid = rand_ascii_str(10)
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                ' Expected "fuchsia_devices" or "android_devices".'
+            )
+
         self.access_point = self.access_points[0]
         self._stop_all_soft_aps()
         self.in_use_interface = None
@@ -68,11 +70,13 @@
         self.access_point.stop_all_aps()
 
     # TODO(fxbug.dev/85738): Change band type to an enum.
-    def channel_switch(self,
-                       band: str,
-                       starting_channel: int,
-                       channel_switches: Sequence[int],
-                       test_with_soft_ap: bool = False) -> None:
+    def channel_switch(
+        self,
+        band: str,
+        starting_channel: int,
+        channel_switches: Sequence[int],
+        test_with_soft_ap: bool = False,
+    ) -> None:
         """Setup and run a channel switch test with the given parameters.
 
         Creates an AP, associates to it, and then issues channel switches
@@ -95,7 +99,8 @@
         """
         asserts.assert_true(
             band in [hostapd_constants.BAND_2G, hostapd_constants.BAND_5G],
-            'Failed to setup AP, invalid band {}'.format(band))
+            "Failed to setup AP, invalid band {}".format(band),
+        )
 
         self.current_channel_num = starting_channel
         if band == hostapd_constants.BAND_5G:
@@ -104,37 +109,46 @@
             self.in_use_interface = self.access_point.wlan_2g
         asserts.assert_true(
             self._channels_valid_for_band([self.current_channel_num], band),
-            'starting channel {} not a valid channel for band {}'.format(
-                self.current_channel_num, band))
+            "starting channel {} not a valid channel for band {}".format(
+                self.current_channel_num, band
+            ),
+        )
 
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=self.current_channel_num,
-                 ssid=self.ssid)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=self.current_channel_num,
+            ssid=self.ssid,
+        )
         if test_with_soft_ap:
             self._start_soft_ap()
-        self.log.info('sending associate command for ssid %s', self.ssid)
+        self.log.info("sending associate command for ssid %s", self.ssid)
         self.dut.associate(target_ssid=self.ssid)
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
 
-        asserts.assert_true(channel_switches,
-                            'Cannot run test, no channels to switch to')
+        asserts.assert_true(
+            channel_switches, "Cannot run test, no channels to switch to"
+        )
         asserts.assert_true(
             self._channels_valid_for_band(channel_switches, band),
-            'channel_switches {} includes invalid channels for band {}'.format(
-                channel_switches, band))
+            "channel_switches {} includes invalid channels for band {}".format(
+                channel_switches, band
+            ),
+        )
 
         for channel_num in channel_switches:
             if channel_num == self.current_channel_num:
                 continue
-            self.log.info('channel switch: {} -> {}'.format(
-                self.current_channel_num, channel_num))
-            self.access_point.channel_switch(self.in_use_interface,
-                                             channel_num)
+            self.log.info(
+                "channel switch: {} -> {}".format(self.current_channel_num, channel_num)
+            )
+            self.access_point.channel_switch(self.in_use_interface, channel_num)
             channel_num_after_switch = self.access_point.get_current_channel(
-                self.in_use_interface)
-            asserts.assert_equal(channel_num_after_switch, channel_num,
-                                 'AP failed to channel switch')
+                self.in_use_interface
+            )
+            asserts.assert_equal(
+                channel_num_after_switch, channel_num, "AP failed to channel switch"
+            )
             self.current_channel_num = channel_num
 
             # Check periodically to see if DUT stays connected. Sometimes
@@ -142,18 +156,23 @@
             for _ in range(self.WAIT_BETWEEN_CHANNEL_SWITCHES_S):
                 asserts.assert_true(
                     self.dut.is_connected(),
-                    'Failed to stay connected after channel switch.')
+                    "Failed to stay connected after channel switch.",
+                )
                 client_channel = self._client_channel()
                 asserts.assert_equal(
-                    client_channel, channel_num,
-                    'Client interface on wrong channel ({})'.format(
-                        client_channel))
+                    client_channel,
+                    channel_num,
+                    "Client interface on wrong channel ({})".format(client_channel),
+                )
                 if test_with_soft_ap:
                     soft_ap_channel = self._soft_ap_channel()
                     asserts.assert_equal(
-                        soft_ap_channel, channel_num,
-                        'SoftAP interface on wrong channel ({})'.format(
-                            soft_ap_channel))
+                        soft_ap_channel,
+                        channel_num,
+                        "SoftAP interface on wrong channel ({})".format(
+                            soft_ap_channel
+                        ),
+                    )
                 time.sleep(1)
 
     def test_channel_switch_2g(self) -> None:
@@ -161,7 +180,8 @@
         self.channel_switch(
             band=hostapd_constants.BAND_2G,
             starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=hostapd_constants.US_CHANNELS_2G)
+            channel_switches=hostapd_constants.US_CHANNELS_2G,
+        )
 
     def test_channel_switch_2g_with_soft_ap(self) -> None:
         """Channel switch through (US only) 2 Ghz channels with SoftAP up."""
@@ -169,18 +189,20 @@
             band=hostapd_constants.BAND_2G,
             starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             channel_switches=hostapd_constants.US_CHANNELS_2G,
-            test_with_soft_ap=True)
+            test_with_soft_ap=True,
+        )
 
     def test_channel_switch_2g_shuffled_with_soft_ap(self) -> None:
         """Switch through shuffled (US only) 2 Ghz channels with SoftAP up."""
         channels = hostapd_constants.US_CHANNELS_2G
         random.shuffle(channels)
-        self.log.info('Shuffled channel switch sequence: {}'.format(channels))
+        self.log.info("Shuffled channel switch sequence: {}".format(channels))
         self.channel_switch(
             band=hostapd_constants.BAND_2G,
             starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             channel_switches=channels,
-            test_with_soft_ap=True)
+            test_with_soft_ap=True,
+        )
 
     # TODO(fxbug.dev/84777): This test fails.
     def test_channel_switch_5g(self) -> None:
@@ -188,7 +210,8 @@
         self.channel_switch(
             band=hostapd_constants.BAND_5G,
             starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=hostapd_constants.US_CHANNELS_5G)
+            channel_switches=hostapd_constants.US_CHANNELS_5G,
+        )
 
     # TODO(fxbug.dev/84777): This test fails.
     def test_channel_switch_5g_with_soft_ap(self) -> None:
@@ -197,22 +220,23 @@
             band=hostapd_constants.BAND_5G,
             starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             channel_switches=hostapd_constants.US_CHANNELS_5G,
-            test_with_soft_ap=True)
+            test_with_soft_ap=True,
+        )
 
     def test_channel_switch_5g_shuffled_with_soft_ap(self) -> None:
         """Switch through shuffled (US only) 5 Ghz channels with SoftAP up."""
         channels = hostapd_constants.US_CHANNELS_5G
         random.shuffle(channels)
-        self.log.info('Shuffled channel switch sequence: {}'.format(channels))
+        self.log.info("Shuffled channel switch sequence: {}".format(channels))
         self.channel_switch(
             band=hostapd_constants.BAND_5G,
             starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             channel_switches=channels,
-            test_with_soft_ap=True)
+            test_with_soft_ap=True,
+        )
 
     # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_115(
-            self) -> None:
+    def test_channel_switch_regression_global_operating_class_115(self) -> None:
         """Channel switch into, through, and out of global op. class 115 channels.
 
         Global operating class 115 is described in IEEE 802.11-2016 Table E-4.
@@ -224,11 +248,13 @@
         self.channel_switch(
             band=hostapd_constants.BAND_5G,
             starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
-            channel_switches=channels)
+            channel_switches=channels,
+        )
 
     # TODO(fxbug.dev/84777): This test fails.
     def test_channel_switch_regression_global_operating_class_115_with_soft_ap(
-            self) -> None:
+        self,
+    ) -> None:
         """Test global operating class 124 channel switches, with SoftAP.
 
         Regression test for fxbug.dev/84777.
@@ -240,11 +266,11 @@
             band=hostapd_constants.BAND_5G,
             starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
             channel_switches=channels,
-            test_with_soft_ap=True)
+            test_with_soft_ap=True,
+        )
 
     # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_124(
-            self) -> None:
+    def test_channel_switch_regression_global_operating_class_124(self) -> None:
         """Switch into, through, and out of global op. class 124 channels.
 
         Global operating class 124 is described in IEEE 802.11-2016 Table E-4.
@@ -256,11 +282,13 @@
         self.channel_switch(
             band=hostapd_constants.BAND_5G,
             starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
-            channel_switches=channels)
+            channel_switches=channels,
+        )
 
     # TODO(fxbug.dev/84777): This test fails.
     def test_channel_switch_regression_global_operating_class_124_with_soft_ap(
-            self) -> None:
+        self,
+    ) -> None:
         """Test global operating class 124 channel switches, with SoftAP.
 
         Regression test for fxbug.dev/64279.
@@ -272,10 +300,10 @@
             band=hostapd_constants.BAND_5G,
             starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
             channel_switches=channels,
-            test_with_soft_ap=True)
+            test_with_soft_ap=True,
+        )
 
-    def _channels_valid_for_band(self, channels: Sequence[int],
-                                 band: str) -> bool:
+    def _channels_valid_for_band(self, channels: Sequence[int], band: str) -> bool:
         """Determine if the channels are valid for the band (US only).
 
         Args:
@@ -287,7 +315,7 @@
         elif band == hostapd_constants.BAND_5G:
             band_channels = frozenset(hostapd_constants.US_CHANNELS_5G)
         else:
-            asserts.fail('Invalid band {}'.format(band))
+            asserts.fail("Invalid band {}".format(band))
         channels_set = frozenset(channels)
         if channels_set <= band_channels:
             return True
@@ -300,19 +328,21 @@
             EnvironmentError: if the SoftAP does not start
         """
         ssid = rand_ascii_str(10)
-        security_type = 'none'
-        password = ''
-        connectivity_mode = 'local_only'
-        operating_band = 'any'
+        security_type = "none"
+        password = ""
+        connectivity_mode = "local_only"
+        operating_band = "any"
 
-        self.log.info('Starting SoftAP on DUT')
+        self.log.info("Starting SoftAP on DUT")
 
         response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            ssid, security_type, password, connectivity_mode, operating_band)
-        if response.get('error'):
-            raise EnvironmentError('SL4F: Failed to setup SoftAP. Err: %s' %
-                                   response['error'])
-        self.log.info('SoftAp network (%s) is up.' % ssid)
+            ssid, security_type, password, connectivity_mode, operating_band
+        )
+        if response.get("error"):
+            raise EnvironmentError(
+                "SL4F: Failed to setup SoftAP. Err: %s" % response["error"]
+            )
+        self.log.info("SoftAp network (%s) is up." % ssid)
 
     def _stop_all_soft_aps(self) -> None:
         """Stops all SoftAPs on Fuchsia Device.
@@ -320,12 +350,11 @@
         Raises:
             EnvironmentError: if SoftAP stop call fails
         """
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint(
-        )
-        if response.get('error'):
+        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
+        if response.get("error"):
             raise EnvironmentError(
-                'SL4F: Failed to stop all SoftAPs. Err: %s' %
-                response['error'])
+                "SL4F: Failed to stop all SoftAPs. Err: %s" % response["error"]
+            )
 
     def _client_channel(self) -> int:
         """Determine the channel of the DUT client interface.
@@ -340,15 +369,15 @@
                 determined
         """
         status = self.dut.status()
-        if status['error']:
-            raise EnvironmentError('Could not determine client channel')
+        if status["error"]:
+            raise EnvironmentError("Could not determine client channel")
 
-        result = status['result']
+        result = status["result"]
         if isinstance(result, dict):
-            if result.get('Connected'):
-                return result['Connected']['channel']['primary']
-            asserts.fail('Client interface not connected')
-        raise EnvironmentError('Could not determine client channel')
+            if result.get("Connected"):
+                return result["Connected"]["channel"]["primary"]
+            asserts.fail("Client interface not connected")
+        raise EnvironmentError("Could not determine client channel")
 
     def _soft_ap_channel(self) -> int:
         """Determine the channel of the DUT SoftAP interface.
@@ -364,16 +393,20 @@
         iface_ids = self.dut.get_wlan_interface_id_list()
         for iface_id in iface_ids:
             query = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(iface_id)
-            if query['error']:
+            if query["error"]:
                 continue
-            query_result = query['result']
-            if type(query_result) is dict and query_result.get('role') == 'Ap':
+            query_result = query["result"]
+            if type(query_result) is dict and query_result.get("role") == "Ap":
                 status = self.dut.device.sl4f.wlan_lib.wlanStatus(iface_id)
-                if status['error']:
+                if status["error"]:
                     continue
-                status_result = status['result']
+                status_result = status["result"]
                 if isinstance(status_result, dict):
-                    if status_result.get('Connected'):
-                        return status_result['Connected']['channel']['primary']
-                    asserts.fail('SoftAP interface not connected')
-        raise EnvironmentError('Could not determine SoftAP channel')
+                    if status_result.get("Connected"):
+                        return status_result["Connected"]["channel"]["primary"]
+                    asserts.fail("SoftAP interface not connected")
+        raise EnvironmentError("Could not determine SoftAP channel")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/ConnectionStressTest.py b/src/antlion/tests/wlan/functional/ConnectionStressTest.py
index 635c902..fa52c7f 100644
--- a/src/antlion/tests/wlan/functional/ConnectionStressTest.py
+++ b/src/antlion/tests/wlan/functional/ConnectionStressTest.py
@@ -20,16 +20,17 @@
 
 import time
 
-from antlion import signals
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 from antlion.utils import rand_ascii_str
 
+from mobly import signals, test_runner
 
-class ConnectionStressTest(WifiBaseTest):
+
+class ConnectionStressTest(base_test.WifiBaseTest):
     # Default number of test iterations here.
     # Override using parameter in config file.
     # Eg: "connection_stress_test_iterations": "50"
@@ -44,9 +45,11 @@
         self.dut = create_wlan_device(self.fd)
         self.access_point = self.access_points[0]
         self.num_of_iterations = int(
-            self.user_params.get("connection_stress_test_iterations",
-                                 self.num_of_iterations))
-        self.log.info('iterations: %d' % self.num_of_iterations)
+            self.user_params.get(
+                "connection_stress_test_iterations", self.num_of_iterations
+            )
+        )
+        self.log.info("iterations: %d" % self.num_of_iterations)
 
     def teardown_test(self):
         self.dut.reset_wifi()
@@ -64,18 +67,18 @@
             profile: Profile name such as 'whirlwind'
             channel: Channel to operate on
         """
-        self.log.info('Profile: %s, Channel: %d' % (profile, channel))
-        setup_ap(access_point=self.access_point,
-                 profile_name=profile,
-                 channel=channel,
-                 ssid=self.ssid,
-                 security=security)
+        self.log.info("Profile: %s, Channel: %d" % (profile, channel))
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=profile,
+            channel=channel,
+            ssid=self.ssid,
+            security=security,
+        )
 
-    def connect_disconnect(self,
-                           ap_config,
-                           ssid=None,
-                           password=None,
-                           negative_test=False):
+    def connect_disconnect(
+        self, ap_config, ssid=None, password=None, negative_test=False
+    ):
         """Helper to start an AP, connect DUT to it and disconnect
 
         Args:
@@ -83,21 +86,24 @@
             ssid: ssid to connect to
             password: password for the ssid to connect to
         """
-        security_mode = ap_config.get('security_mode', None)
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            security_mode, None)
+        security_mode = ap_config.get("security_mode", None)
+        target_security = (
+            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                security_mode, None
+            )
+        )
 
         if security_mode:
             security_profile = hostapd_security.Security(
-                security_mode=ap_config['security_mode'],
-                password=ap_config['password'])
+                security_mode=ap_config["security_mode"], password=ap_config["password"]
+            )
         else:
             security_profile = None
 
         # Start AP
-        self.start_ap(ap_config['profile'],
-                      ap_config['channel'],
-                      security=security_profile)
+        self.start_ap(
+            ap_config["profile"], ap_config["channel"], security=security_profile
+        )
 
         failed = False
         # Connect and Disconnect several times
@@ -105,21 +111,22 @@
             if not ssid:
                 ssid = self.ssid
             if negative_test:
-                if not self.dut.associate(ssid,
-                                          target_pwd=password,
-                                          target_security=target_security):
-                    self.log.info(
-                        'Attempt %d. Did not associate as expected.' % x)
+                if not self.dut.associate(
+                    ssid, target_pwd=password, target_security=target_security
+                ):
+                    self.log.info("Attempt %d. Did not associate as expected." % x)
                 else:
-                    self.log.error('Attempt %d. Negative test successfully '
-                                   'associated. Fail.' % x)
+                    self.log.error(
+                        "Attempt %d. Negative test successfully "
+                        "associated. Fail." % x
+                    )
                     failed = True
             else:
                 # Connect
                 if self.dut.associate(ssid, target_pwd=password):
-                    self.log.info('Attempt %d. Successfully associated' % x)
+                    self.log.info("Attempt %d. Successfully associated" % x)
                 else:
-                    self.log.error('Attempt %d. Failed to associate.' % x)
+                    self.log.error("Attempt %d. Failed to associate." % x)
                     failed = True
                 # Disconnect
                 self.dut.disconnect()
@@ -130,89 +137,92 @@
         # Stop AP
         self.access_point.stop_all_aps()
         if failed:
-            raise signals.TestFailure(
-                'One or more association attempt failed.')
+            raise signals.TestFailure("One or more association attempt failed.")
 
     def test_whirlwind_2g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind',
-            'channel': self.channel_2G,
-            'security_mode': None
-        })
+        self.connect_disconnect(
+            {"profile": "whirlwind", "channel": self.channel_2G, "security_mode": None}
+        )
 
     def test_whirlwind_5g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind',
-            'channel': self.channel_5G,
-            'security_mode': None
-        })
+        self.connect_disconnect(
+            {"profile": "whirlwind", "channel": self.channel_5G, "security_mode": None}
+        )
 
     def test_whirlwind_11ab_2g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ab_legacy',
-            'channel': self.channel_2G,
-            'security_mode': None
-        })
+        self.connect_disconnect(
+            {
+                "profile": "whirlwind_11ab_legacy",
+                "channel": self.channel_2G,
+                "security_mode": None,
+            }
+        )
 
     def test_whirlwind_11ab_5g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ab_legacy',
-            'channel': self.channel_5G,
-            'security_mode': None
-        })
+        self.connect_disconnect(
+            {
+                "profile": "whirlwind_11ab_legacy",
+                "channel": self.channel_5G,
+                "security_mode": None,
+            }
+        )
 
     def test_whirlwind_11ag_2g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ag_legacy',
-            'channel': self.channel_2G,
-            'security_mode': None
-        })
+        self.connect_disconnect(
+            {
+                "profile": "whirlwind_11ag_legacy",
+                "channel": self.channel_2G,
+                "security_mode": None,
+            }
+        )
 
     def test_whirlwind_11ag_5g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ag_legacy',
-            'channel': self.channel_5G,
-            'security_mode': None
-        })
+        self.connect_disconnect(
+            {
+                "profile": "whirlwind_11ag_legacy",
+                "channel": self.channel_5G,
+                "security_mode": None,
+            }
+        )
 
     def test_wrong_ssid_whirlwind_2g(self):
         self.connect_disconnect(
-            {
-                'profile': 'whirlwind',
-                'channel': self.channel_2G,
-                'security_mode': None
-            },
+            {"profile": "whirlwind", "channel": self.channel_2G, "security_mode": None},
             ssid=rand_ascii_str(20),
-            negative_test=True)
+            negative_test=True,
+        )
 
     def test_wrong_ssid_whirlwind_5g(self):
         self.connect_disconnect(
-            {
-                'profile': 'whirlwind',
-                'channel': self.channel_5G,
-                'security_mode': None
-            },
+            {"profile": "whirlwind", "channel": self.channel_5G, "security_mode": None},
             ssid=rand_ascii_str(20),
-            negative_test=True)
+            negative_test=True,
+        )
 
     def test_wrong_password_whirlwind_2g(self):
         self.connect_disconnect(
             {
-                'profile': 'whirlwind',
-                'channel': self.channel_2G,
-                'security_mode': hostapd_constants.WPA2_STRING,
-                'password': rand_ascii_str(10)
+                "profile": "whirlwind",
+                "channel": self.channel_2G,
+                "security_mode": hostapd_constants.WPA2_STRING,
+                "password": rand_ascii_str(10),
             },
             password=rand_ascii_str(20),
-            negative_test=True)
+            negative_test=True,
+        )
 
     def test_wrong_password_whirlwind_5g(self):
         self.connect_disconnect(
             {
-                'profile': 'whirlwind',
-                'channel': self.channel_5G,
-                'security_mode': hostapd_constants.WPA2_STRING,
-                'password': rand_ascii_str(10)
+                "profile": "whirlwind",
+                "channel": self.channel_5G,
+                "security_mode": hostapd_constants.WPA2_STRING,
+                "password": rand_ascii_str(10),
             },
             password=rand_ascii_str(20),
-            negative_test=True)
+            negative_test=True,
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/DownloadStressTest.py b/src/antlion/tests/wlan/functional/DownloadStressTest.py
index e376539..28012c3 100644
--- a/src/antlion/tests/wlan/functional/DownloadStressTest.py
+++ b/src/antlion/tests/wlan/functional/DownloadStressTest.py
@@ -19,16 +19,17 @@
 """
 import threading
 
-from antlion import signals
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 from antlion.test_utils.fuchsia import utils
 from antlion.utils import rand_ascii_str
 
+from mobly import signals, test_runner
 
-class DownloadStressTest(WifiBaseTest):
+
+class DownloadStressTest(base_test.WifiBaseTest):
     # Default number of test iterations here.
     # Override using parameter in config file.
     # Eg: "download_stress_test_iterations": "10"
@@ -38,10 +39,10 @@
     download_timeout_s = 60 * 5
 
     # Download urls
-    url_20MB = 'http://ipv4.download.thinkbroadband.com/20MB.zip'
-    url_40MB = 'http://ipv4.download.thinkbroadband.com/40MB.zip'
-    url_60MB = 'http://ipv4.download.thinkbroadband.com/60MB.zip'
-    url_512MB = 'http://ipv4.download.thinkbroadband.com/512MB.zip'
+    url_20MB = "http://ipv4.download.thinkbroadband.com/20MB.zip"
+    url_40MB = "http://ipv4.download.thinkbroadband.com/40MB.zip"
+    url_60MB = "http://ipv4.download.thinkbroadband.com/60MB.zip"
+    url_512MB = "http://ipv4.download.thinkbroadband.com/512MB.zip"
 
     # Constants used in test_one_large_multiple_small_downloads
     download_small_url = url_20MB
@@ -55,13 +56,17 @@
         self.dut = create_wlan_device(self.fuchsia_devices[0])
         self.access_point = self.access_points[0]
         self.num_of_iterations = int(
-            self.user_params.get("download_stress_test_iterations",
-                                 self.num_of_iterations))
+            self.user_params.get(
+                "download_stress_test_iterations", self.num_of_iterations
+            )
+        )
 
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
         self.dut.associate(self.ssid)
 
     def teardown_test(self):
@@ -89,7 +94,8 @@
         return utils.http_file_download_by_curl(
             self.dut.device,
             url,
-            additional_args='--max-time %d --silent' % self.download_timeout_s)
+            additional_args="--max-time %d --silent" % self.download_timeout_s,
+        )
 
     def download_thread(self, url):
         download_status = self.download_file(url)
@@ -108,8 +114,8 @@
         try:
             # Start multiple downloads at the same time
             for index, url in enumerate(download_urls):
-                self.log.info('Create and start thread %d.' % index)
-                t = threading.Thread(target=self.download_thread, args=(url, ))
+                self.log.info("Create and start thread %d." % index)
+                t = threading.Thread(target=self.download_thread, args=(url,))
                 download_threads.append(t)
                 t.start()
 
@@ -126,13 +132,12 @@
                     is_alive = True
 
             if is_alive:
-                raise signals.TestFailure('Thread %d timedout' % index)
+                raise signals.TestFailure("Thread %d timedout" % index)
 
         for index in range(0, len(self.download_threads_result)):
             if not self.download_threads_result[index]:
                 self.log.info("Download failed for %d" % index)
-                raise signals.TestFailure('Thread %d failed to download' %
-                                          index)
+                raise signals.TestFailure("Thread %d failed to download" % index)
                 return False
 
         return True
@@ -142,15 +147,16 @@
             download_threads = []
             try:
                 large_thread = threading.Thread(
-                    target=self.download_thread,
-                    args=(self.download_large_url, ))
+                    target=self.download_thread, args=(self.download_large_url,)
+                )
                 download_threads.append(large_thread)
                 large_thread.start()
 
                 for i in range(self.num_of_small_downloads):
                     # Start small file download
-                    t = threading.Thread(target=self.download_thread,
-                                         args=(self.download_small_url, ))
+                    t = threading.Thread(
+                        target=self.download_thread, args=(self.download_small_url,)
+                    )
                     download_threads.append(t)
                     t.start()
                     # Wait for thread to exit before starting the next iteration
@@ -168,16 +174,19 @@
                         is_alive = True
 
                 if is_alive:
-                    raise signals.TestFailure('Thread %d timedout' % index)
+                    raise signals.TestFailure("Thread %d timedout" % index)
 
             for index in range(0, len(self.download_threads_result)):
                 if not self.download_threads_result[index]:
                     self.log.info("Download failed for %d" % index)
-                    raise signals.TestFailure('Thread %d failed to download' %
-                                              index)
+                    raise signals.TestFailure("Thread %d failed to download" % index)
                     return False
 
             # Clear results before looping again
             self.download_threads_result.clear()
 
         return True
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/PingStressTest.py b/src/antlion/tests/wlan/functional/PingStressTest.py
index 1653205..431c2e1 100644
--- a/src/antlion/tests/wlan/functional/PingStressTest.py
+++ b/src/antlion/tests/wlan/functional/PingStressTest.py
@@ -24,81 +24,103 @@
 
 from collections import namedtuple
 
-from antlion import signals
 from antlion import utils
-
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 from antlion.utils import rand_ascii_str
 
-LOOPBACK_IPV4 = '127.0.0.1'
-LOOPBACK_IPV6 = '::1'
+from mobly import signals, test_runner
+
+LOOPBACK_IPV4 = "127.0.0.1"
+LOOPBACK_IPV6 = "::1"
 PING_RESULT_TIMEOUT_SEC = 60 * 5
 
 Test = namedtuple(
-    typename='Args',
-    field_names=['name', 'dest_ip', 'count', 'interval', 'timeout', 'size'],
-    defaults=[3, 1000, 1000, 25])
+    typename="Args",
+    field_names=["name", "dest_ip", "count", "interval", "timeout", "size"],
+    defaults=[3, 1000, 1000, 25],
+)
 
 Addrs = namedtuple(
-    typename='Addrs',
-    field_names=['gateway_ipv4', 'gateway_ipv6', 'remote_ipv4', 'remote_ipv6'])
+    typename="Addrs",
+    field_names=["gateway_ipv4", "gateway_ipv6", "remote_ipv4", "remote_ipv6"],
+)
 
 
-class PingStressTest(WifiBaseTest):
-
+class PingStressTest(base_test.WifiBaseTest):
     def setup_generated_tests(self):
         self.generate_tests(
-            self.send_ping, lambda test_name, *_: f'test_{test_name}', [
+            self.send_ping,
+            lambda test_name, *_: f"test_{test_name}",
+            [
                 Test("loopback_ipv4", LOOPBACK_IPV4),
                 Test("loopback_ipv6", LOOPBACK_IPV6),
                 Test("gateway_ipv4", lambda addrs: addrs.gateway_ipv4),
                 Test("gateway_ipv6", lambda addrs: addrs.gateway_ipv6),
-                Test("remote_ipv4_small_packet",
-                     lambda addrs: addrs.remote_ipv4),
-                Test("remote_ipv6_small_packet",
-                     lambda addrs: addrs.remote_ipv6),
-                Test("remote_ipv4_small_packet_long",
-                     lambda addrs: addrs.remote_ipv4,
-                     count=50),
-                Test("remote_ipv6_small_packet_long",
-                     lambda addrs: addrs.remote_ipv6,
-                     count=50),
-                Test("remote_ipv4_medium_packet",
-                     lambda addrs: addrs.remote_ipv4,
-                     size=64),
-                Test("remote_ipv6_medium_packet",
-                     lambda addrs: addrs.remote_ipv6,
-                     size=64),
-                Test("remote_ipv4_medium_packet_long",
-                     lambda addrs: addrs.remote_ipv4,
-                     count=50,
-                     timeout=1500,
-                     size=64),
-                Test("remote_ipv6_medium_packet_long",
-                     lambda addrs: addrs.remote_ipv6,
-                     count=50,
-                     timeout=1500,
-                     size=64),
-                Test("remote_ipv4_large_packet",
-                     lambda addrs: addrs.remote_ipv4,
-                     size=500),
-                Test("remote_ipv6_large_packet",
-                     lambda addrs: addrs.remote_ipv6,
-                     size=500),
-                Test("remote_ipv4_large_packet_long",
-                     lambda addrs: addrs.remote_ipv4,
-                     count=50,
-                     timeout=5000,
-                     size=500),
-                Test("remote_ipv6_large_packet_long",
-                     lambda addrs: addrs.remote_ipv6,
-                     count=50,
-                     timeout=5000,
-                     size=500),
-            ])
+                Test("remote_ipv4_small_packet", lambda addrs: addrs.remote_ipv4),
+                Test("remote_ipv6_small_packet", lambda addrs: addrs.remote_ipv6),
+                Test(
+                    "remote_ipv4_small_packet_long",
+                    lambda addrs: addrs.remote_ipv4,
+                    count=50,
+                ),
+                Test(
+                    "remote_ipv6_small_packet_long",
+                    lambda addrs: addrs.remote_ipv6,
+                    count=50,
+                ),
+                Test(
+                    "remote_ipv4_medium_packet",
+                    lambda addrs: addrs.remote_ipv4,
+                    size=64,
+                ),
+                Test(
+                    "remote_ipv6_medium_packet",
+                    lambda addrs: addrs.remote_ipv6,
+                    size=64,
+                ),
+                Test(
+                    "remote_ipv4_medium_packet_long",
+                    lambda addrs: addrs.remote_ipv4,
+                    count=50,
+                    timeout=1500,
+                    size=64,
+                ),
+                Test(
+                    "remote_ipv6_medium_packet_long",
+                    lambda addrs: addrs.remote_ipv6,
+                    count=50,
+                    timeout=1500,
+                    size=64,
+                ),
+                Test(
+                    "remote_ipv4_large_packet",
+                    lambda addrs: addrs.remote_ipv4,
+                    size=500,
+                ),
+                Test(
+                    "remote_ipv6_large_packet",
+                    lambda addrs: addrs.remote_ipv6,
+                    size=500,
+                ),
+                Test(
+                    "remote_ipv4_large_packet_long",
+                    lambda addrs: addrs.remote_ipv4,
+                    count=50,
+                    timeout=5000,
+                    size=500,
+                ),
+                Test(
+                    "remote_ipv6_large_packet_long",
+                    lambda addrs: addrs.remote_ipv6,
+                    count=50,
+                    timeout=5000,
+                    size=500,
+                ),
+            ],
+        )
 
     def setup_class(self):
         super().setup_class()
@@ -106,29 +128,32 @@
         self.dut = create_wlan_device(self.fuchsia_devices[0])
         self.access_point = self.access_points[0]
         self.iperf_server = self.iperf_servers[0]
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 setup_bridge=True,
-                 is_ipv6_enabled=True,
-                 is_nat_enabled=False)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            setup_bridge=True,
+            is_ipv6_enabled=True,
+            is_nat_enabled=False,
+        )
 
         ap_bridges = self.access_point.interfaces.get_bridge_interface()
         if len(ap_bridges) != 1:
             raise signals.TestAbortClass(
-                f'Expected one bridge interface on the AP, got {ap_bridges}')
+                f"Expected one bridge interface on the AP, got {ap_bridges}"
+            )
         self.ap_ipv4 = utils.get_addr(self.access_point.ssh, ap_bridges[0])
-        self.ap_ipv6 = utils.get_addr(self.access_point.ssh,
-                                      ap_bridges[0],
-                                      addr_type='ipv6_link_local')
-        self.log.info(
-            f"Gateway finished setup ({self.ap_ipv4} | {self.ap_ipv6})")
+        self.ap_ipv6 = utils.get_addr(
+            self.access_point.ssh, ap_bridges[0], addr_type="ipv6_link_local"
+        )
+        self.log.info(f"Gateway finished setup ({self.ap_ipv4} | {self.ap_ipv6})")
 
         self.iperf_server.renew_test_interface_ip_address()
         self.iperf_server_ipv4 = self.iperf_server.get_addr()
         self.iperf_server_ipv6 = self.iperf_server.get_addr(
-            addr_type='ipv6_private_local')
+            addr_type="ipv6_private_local"
+        )
         self.log.info(
             f"Remote finished setup ({self.iperf_server_ipv4} | {self.iperf_server_ipv6})"
         )
@@ -137,9 +162,11 @@
 
         # Wait till the DUT has valid IP addresses after connecting.
         self.dut.device.wait_for_ipv4_addr(
-            self.dut.device.wlan_client_test_interface_name)
+            self.dut.device.wlan_client_test_interface_name
+        )
         self.dut.device.wait_for_ipv6_addr(
-            self.dut.device.wlan_client_test_interface_name)
+            self.dut.device.wlan_client_test_interface_name
+        )
         self.log.info("DUT has valid IP addresses on test network")
 
     def teardown_class(self):
@@ -148,57 +175,53 @@
         self.download_ap_logs()
         self.access_point.stop_all_aps()
 
-    def send_ping(self,
-                  _,
-                  get_addr_fn,
-                  count=3,
-                  interval=1000,
-                  timeout=1000,
-                  size=25):
-        dest_ip = get_addr_fn(
-            Addrs(
-                gateway_ipv4=self.ap_ipv4,
-                # IPv6 link-local addresses require specification of the
-                # outgoing interface as the scope ID when sending packets.
-                gateway_ipv6=
-                f'{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}',
-                remote_ipv4=self.iperf_server_ipv4,
-                # IPv6 global addresses do not require scope IDs.
-                remote_ipv6=self.iperf_server_ipv6)) if callable(
-                    get_addr_fn) else get_addr_fn
+    def send_ping(self, _, get_addr_fn, count=3, interval=1000, timeout=1000, size=25):
+        dest_ip = (
+            get_addr_fn(
+                Addrs(
+                    gateway_ipv4=self.ap_ipv4,
+                    # IPv6 link-local addresses require specification of the
+                    # outgoing interface as the scope ID when sending packets.
+                    gateway_ipv6=f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}",
+                    remote_ipv4=self.iperf_server_ipv4,
+                    # IPv6 global addresses do not require scope IDs.
+                    remote_ipv6=self.iperf_server_ipv6,
+                )
+            )
+            if callable(get_addr_fn)
+            else get_addr_fn
+        )
 
-        self.log.info(f'Attempting to ping {dest_ip}...')
-        ping_result = self.dut.can_ping(dest_ip, count, interval, timeout,
-                                        size)
+        self.log.info(f"Attempting to ping {dest_ip}...")
+        ping_result = self.dut.can_ping(dest_ip, count, interval, timeout, size)
         if ping_result:
-            self.log.info('Ping was successful.')
+            self.log.info("Ping was successful.")
         else:
-            raise signals.TestFailure('Ping was unsuccessful.')
+            raise signals.TestFailure("Ping was unsuccessful.")
 
     def test_simultaneous_pings(self):
         ping_urls = [
             self.iperf_server_ipv4,
             self.ap_ipv4,
             self.iperf_server_ipv6,
-            f'{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}',
+            f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}",
         ]
         ping_threads = []
         ping_results = []
 
         def ping_thread(self, dest_ip, ping_results):
-            self.log.info('Attempting to ping %s...' % dest_ip)
+            self.log.info("Attempting to ping %s..." % dest_ip)
             ping_result = self.dut.can_ping(dest_ip, count=10, size=50)
             if ping_result:
-                self.log.info('Success pinging: %s' % dest_ip)
+                self.log.info("Success pinging: %s" % dest_ip)
             else:
-                self.log.info('Failure pinging: %s' % dest_ip)
+                self.log.info("Failure pinging: %s" % dest_ip)
             ping_results.append(ping_result)
 
         try:
             # Start multiple ping at the same time
             for index, url in enumerate(ping_urls):
-                t = threading.Thread(target=ping_thread,
-                                     args=(self, url, ping_results))
+                t = threading.Thread(target=ping_thread, args=(self, url, ping_results))
                 ping_threads.append(t)
                 t.start()
 
@@ -215,10 +238,13 @@
                     is_alive = True
 
             if is_alive:
-                raise signals.TestFailure(
-                    f'Timed out while pinging {ping_urls[index]}')
+                raise signals.TestFailure(f"Timed out while pinging {ping_urls[index]}")
 
         for index in range(0, len(ping_results)):
             if not ping_results[index]:
-                raise signals.TestFailure(f'Failed to ping {ping_urls[index]}')
+                raise signals.TestFailure(f"Failed to ping {ping_urls[index]}")
         return True
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/SoftApTest.py b/src/antlion/tests/wlan/functional/SoftApTest.py
index 7255b0c..471c5a3 100644
--- a/src/antlion/tests/wlan/functional/SoftApTest.py
+++ b/src/antlion/tests/wlan/functional/SoftApTest.py
@@ -14,53 +14,55 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from mobly import signals
 import multiprocessing as mp
 import random
 import time
 
 from antlion import utils
-from antlion import asserts
 from antlion.controllers import iperf_server
 from antlion.controllers import iperf_client
 from antlion.controllers.access_point import setup_ap, AccessPoint
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib import hostapd_security
 from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.controllers.utils_lib.ssh import settings
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
-CONNECTIVITY_MODE_LOCAL = 'local_only'
-CONNECTIVITY_MODE_UNRESTRICTED = 'unrestricted'
-DEFAULT_AP_PROFILE = 'whirlwind'
+from mobly import asserts, signals, test_runner
+
+CONNECTIVITY_MODE_LOCAL = "local_only"
+CONNECTIVITY_MODE_UNRESTRICTED = "unrestricted"
+DEFAULT_AP_PROFILE = "whirlwind"
 DEFAULT_IPERF_PORT = 5201
 DEFAULT_STRESS_TEST_ITERATIONS = 10
 DEFAULT_TIMEOUT = 30
 DEFAULT_IPERF_TIMEOUT = 60
 DEFAULT_NO_ADDR_EXPECTED_TIMEOUT = 5
-INTERFACE_ROLE_AP = 'Ap'
-INTERFACE_ROLE_CLIENT = 'Client'
-OPERATING_BAND_2G = 'only_2_4_ghz'
-OPERATING_BAND_5G = 'only_5_ghz'
-OPERATING_BAND_ANY = 'any'
-SECURITY_OPEN = 'none'
-SECURITY_WEP = 'wep'
-SECURITY_WPA = 'wpa'
-SECURITY_WPA2 = 'wpa2'
-SECURITY_WPA3 = 'wpa3'
+INTERFACE_ROLE_AP = "Ap"
+INTERFACE_ROLE_CLIENT = "Client"
+OPERATING_BAND_2G = "only_2_4_ghz"
+OPERATING_BAND_5G = "only_5_ghz"
+OPERATING_BAND_ANY = "any"
+SECURITY_OPEN = "none"
+SECURITY_WEP = "wep"
+SECURITY_WPA = "wpa"
+SECURITY_WPA2 = "wpa2"
+SECURITY_WPA3 = "wpa3"
 STATE_UP = True
 STATE_DOWN = False
-TEST_TYPE_ASSOCIATE_ONLY = 'associate_only'
-TEST_TYPE_ASSOCIATE_AND_PING = 'associate_and_ping'
-TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC = 'associate_and_pass_traffic'
+TEST_TYPE_ASSOCIATE_ONLY = "associate_only"
+TEST_TYPE_ASSOCIATE_AND_PING = "associate_and_ping"
+TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC = "associate_and_pass_traffic"
 TEST_TYPES = {
-    TEST_TYPE_ASSOCIATE_ONLY, TEST_TYPE_ASSOCIATE_AND_PING,
-    TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC
+    TEST_TYPE_ASSOCIATE_ONLY,
+    TEST_TYPE_ASSOCIATE_AND_PING,
+    TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC,
 }
 
 
 def get_test_name_from_settings(settings):
-    return settings['test_name']
+    return settings["test_name"]
 
 
 def get_ap_params_from_config_or_default(config):
@@ -72,12 +74,11 @@
     Returns:
         dict, containing all AP parameters
     """
-    profile = config.get('profile', DEFAULT_AP_PROFILE)
-    ssid = config.get(
-        'ssid', utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
-    channel = config.get('channel', hostapd_constants.AP_DEFAULT_CHANNEL_2G)
-    security_mode = config.get('security_mode', None)
-    password = config.get('password', None)
+    profile = config.get("profile", DEFAULT_AP_PROFILE)
+    ssid = config.get("ssid", utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
+    channel = config.get("channel", hostapd_constants.AP_DEFAULT_CHANNEL_2G)
+    security_mode = config.get("security_mode", None)
+    password = config.get("password", None)
     if security_mode:
         if not password:
             password = generate_random_password(security_mode=security_mode)
@@ -86,11 +87,11 @@
         security = None
 
     return {
-        'profile': profile,
-        'ssid': ssid,
-        'channel': channel,
-        'security': security,
-        'password': password
+        "profile": profile,
+        "ssid": ssid,
+        "channel": channel,
+        "security": security,
+        "password": password,
     }
 
 
@@ -104,26 +105,24 @@
     Returns:
         dict, containing all soft AP parameters
     """
-    ssid = config.get(
-        'ssid', utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
-    connectivity_mode = config.get('connectivity_mode',
-                                   CONNECTIVITY_MODE_LOCAL)
-    operating_band = config.get('operating_band', OPERATING_BAND_2G)
-    security_type = config.get('security_type', SECURITY_OPEN)
-    password = config.get('password', '')
+    ssid = config.get("ssid", utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
+    connectivity_mode = config.get("connectivity_mode", CONNECTIVITY_MODE_LOCAL)
+    operating_band = config.get("operating_band", OPERATING_BAND_2G)
+    security_type = config.get("security_type", SECURITY_OPEN)
+    password = config.get("password", "")
 
     # The SoftAP API uses 'open' security instead of None, '' password
     # instead of None, and security_type instead of security_mode, hence
     # the difference between ap_params and soft_ap_params
-    if security_type != SECURITY_OPEN and password == '':
+    if security_type != SECURITY_OPEN and password == "":
         password = generate_random_password(security_mode=security_type)
 
     return {
-        'ssid': ssid,
-        'connectivity_mode': connectivity_mode,
-        'operating_band': operating_band,
-        'security_type': security_type,
-        'password': password
+        "ssid": ssid,
+        "connectivity_mode": connectivity_mode,
+        "operating_band": operating_band,
+        "security_type": security_type,
+        "password": password,
     }
 
 
@@ -131,7 +130,7 @@
     """Used to differentiate a subtest failure from an actual exception"""
 
 
-class SoftApTest(WifiBaseTest):
+class SoftApTest(base_test.WifiBaseTest):
     """Tests for Fuchsia SoftAP
 
     Testbed requirement:
@@ -146,8 +145,7 @@
     """
 
     def setup_class(self):
-        self.soft_ap_test_params = self.user_params.get(
-            'soft_ap_test_params', {})
+        self.soft_ap_test_params = self.user_params.get("soft_ap_test_params", {})
         self.dut = create_wlan_device(self.fuchsia_devices[0])
 
         # TODO(fxb/51313): Add in device agnosticity for clients
@@ -158,25 +156,30 @@
             client_wlan_device = create_wlan_device(device)
             self.clients.append(client_wlan_device)
             self.iperf_clients_map[
-                client_wlan_device] = client_wlan_device.create_iperf_client()
+                client_wlan_device
+            ] = client_wlan_device.create_iperf_client()
         self.primary_client = self.clients[0]
 
         # Create an iperf server on the DUT, which will be used for any streaming.
-        self.iperf_server_config = {
-            'user': self.dut.device.ssh_username,
-            'host': self.dut.device.ip,
-            'ssh_config': self.dut.device.ssh_config
-        }
+        self.iperf_server_settings = settings.from_config(
+            {
+                "user": self.dut.device.ssh_username,
+                "host": self.dut.device.ip,
+                "ssh_config": self.dut.device.ssh_config,
+            }
+        )
         self.iperf_server = iperf_server.IPerfServerOverSsh(
-            self.iperf_server_config, DEFAULT_IPERF_PORT, use_killall=True)
+            self.iperf_server_settings, DEFAULT_IPERF_PORT, use_killall=True
+        )
         self.iperf_server.start()
 
         # Attempt to create an ap iperf server. AP is only required for tests
         # that use client mode.
         try:
-            self.access_point = self.access_points[0]
+            self.access_point: AccessPoint = self.access_points[0]
             self.ap_iperf_client = iperf_client.IPerfClientOverSsh(
-                self.user_params['AccessPoint'][0]['ssh_config'])
+                self.access_point.ssh_settings
+            )
         except AttributeError:
             self.access_point = None
             self.ap_iperf_client = None
@@ -226,24 +229,26 @@
                 operating_band: string, band for softAP network
                     - 'any', 'only_5_ghz', 'only_2_4_ghz'
         """
-        ssid = settings['ssid']
-        security_type = settings['security_type']
-        password = settings.get('password', '')
-        connectivity_mode = settings['connectivity_mode']
-        operating_band = settings['operating_band']
+        ssid = settings["ssid"]
+        security_type = settings["security_type"]
+        password = settings.get("password", "")
+        connectivity_mode = settings["connectivity_mode"]
+        operating_band = settings["operating_band"]
 
-        self.log.info('Starting SoftAP on DUT with settings: %s' % settings)
+        self.log.info("Starting SoftAP on DUT with settings: %s" % settings)
 
         response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            ssid, security_type, password, connectivity_mode, operating_band)
-        if response.get('error'):
-            raise EnvironmentError('SL4F: Failed to setup SoftAP. Err: %s' %
-                                   response['error'])
+            ssid, security_type, password, connectivity_mode, operating_band
+        )
+        if response.get("error"):
+            raise EnvironmentError(
+                "SL4F: Failed to setup SoftAP. Err: %s" % response["error"]
+            )
 
-        self.log.info('SoftAp network (%s) is up.' % ssid)
+        self.log.info("SoftAp network (%s) is up." % ssid)
 
     def stop_soft_ap(self, settings):
-        """ Stops a specific SoftAP On Fuchsia device.
+        """Stops a specific SoftAP On Fuchsia device.
 
         Args:
             settings: a dict containing softAP config params (see start_soft_ap)
@@ -252,28 +257,29 @@
         Raises:
             EnvironmentError, if StopSoftAP call fails.
         """
-        ssid = settings['ssid']
-        security_type = settings['security_type']
-        password = settings.get('password', '')
+        ssid = settings["ssid"]
+        security_type = settings["security_type"]
+        password = settings.get("password", "")
 
         response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAccessPoint(
-            ssid, security_type, password)
-        if response.get('error'):
-            raise EnvironmentError('SL4F: Failed to stop SoftAP. Err: %s' %
-                                   response['error'])
+            ssid, security_type, password
+        )
+        if response.get("error"):
+            raise EnvironmentError(
+                "SL4F: Failed to stop SoftAP. Err: %s" % response["error"]
+            )
 
     def stop_all_soft_aps(self):
-        """ Stops all SoftAPs on Fuchsia Device.
+        """Stops all SoftAPs on Fuchsia Device.
 
         Raises:
             EnvironmentError, if StopAllAps call fails.
         """
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint(
-        )
-        if response.get('error'):
+        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
+        if response.get("error"):
             raise EnvironmentError(
-                'SL4F: Failed to stop all SoftAPs. Err: %s' %
-                response['error'])
+                "SL4F: Failed to stop all SoftAPs. Err: %s" % response["error"]
+            )
 
     def associate_with_soft_ap(self, device, soft_ap_settings):
         """Associates client device with softAP on Fuchsia device.
@@ -287,24 +293,27 @@
             TestFailure, if association fails
         """
         self.log.info(
-            'Attempting to associate client %s with SoftAP on FuchsiaDevice '
-            '(%s).' % (device.identifier, self.dut.identifier))
+            "Attempting to associate client %s with SoftAP on FuchsiaDevice "
+            "(%s)." % (device.identifier, self.dut.identifier)
+        )
 
-        check_connectivity = soft_ap_settings[
-            'connectivity_mode'] == CONNECTIVITY_MODE_UNRESTRICTED
+        check_connectivity = (
+            soft_ap_settings["connectivity_mode"] == CONNECTIVITY_MODE_UNRESTRICTED
+        )
         associated = device.associate(
-            soft_ap_settings['ssid'],
-            target_pwd=soft_ap_settings.get('password'),
-            target_security=hostapd_constants.
-            SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                soft_ap_settings['security_type'], None),
-            check_connectivity=check_connectivity)
+            soft_ap_settings["ssid"],
+            target_pwd=soft_ap_settings.get("password"),
+            target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                soft_ap_settings["security_type"], None
+            ),
+            check_connectivity=check_connectivity,
+        )
 
         if not associated:
-            self.log.error('Failed to connect to SoftAp.')
+            self.log.error("Failed to connect to SoftAp.")
             return False
 
-        self.log.info('Client successfully associated with SoftAP.')
+        self.log.info("Client successfully associated with SoftAP.")
         return True
 
     def disconnect_from_soft_ap(self, device):
@@ -313,8 +322,7 @@
         Args:
             device: wlan_device to disconnect from SoftAP
         """
-        self.log.info('Disconnecting device %s from SoftAP.' %
-                      device.identifier)
+        self.log.info("Disconnecting device %s from SoftAP." % device.identifier)
         device.disconnect()
 
     def get_device_test_interface(self, device, role=None, channel=None):
@@ -338,11 +346,10 @@
             elif role == INTERFACE_ROLE_AP:
                 return device.device.wlan_ap_test_interface_name
             else:
-                raise ValueError('Unsupported interface role: %s' % role)
+                raise ValueError("Unsupported interface role: %s" % role)
         elif isinstance(device, AccessPoint):
             if not channel:
-                raise ValueError(
-                    'Must provide a channel to get AccessPoint interface')
+                raise ValueError("Must provide a channel to get AccessPoint interface")
             if channel < 36:
                 return device.wlan_2g
             else:
@@ -350,11 +357,8 @@
         else:
             return device.get_default_wlan_test_interface()
 
-    def wait_for_ipv4_address(self,
-                              device,
-                              interface_name,
-                              timeout=DEFAULT_TIMEOUT):
-        """ Waits for interface on a wlan_device to get an ipv4 address.
+    def wait_for_ipv4_address(self, device, interface_name, timeout=DEFAULT_TIMEOUT):
+        """Waits for interface on a wlan_device to get an ipv4 address.
 
         Args:
             device: wlan_device or AccessPoint to check interface
@@ -370,21 +374,22 @@
             comm_channel = device.device
         end_time = time.time() + timeout
         while time.time() < end_time:
-            ips = utils.get_interface_ip_addresses(comm_channel,
-                                                   interface_name)
-            if len(ips['ipv4_private']) > 0:
-                self.log.info('Device %s interface %s has ipv4 address %s' %
-                              (device.identifier, interface_name,
-                               ips['ipv4_private'][0]))
-                return ips['ipv4_private'][0]
+            ips = utils.get_interface_ip_addresses(comm_channel, interface_name)
+            if len(ips["ipv4_private"]) > 0:
+                self.log.info(
+                    "Device %s interface %s has ipv4 address %s"
+                    % (device.identifier, interface_name, ips["ipv4_private"][0])
+                )
+                return ips["ipv4_private"][0]
             else:
                 time.sleep(1)
         raise ConnectionError(
-            'After %s seconds, device %s still does not have an ipv4 address '
-            'on interface %s.' % (timeout, device.identifier, interface_name))
+            "After %s seconds, device %s still does not have an ipv4 address "
+            "on interface %s." % (timeout, device.identifier, interface_name)
+        )
 
     def device_can_ping_addr(self, device, dest_ip, timeout=DEFAULT_TIMEOUT):
-        """ Verify wlan_device can ping a destination ip.
+        """Verify wlan_device can ping a destination ip.
 
         Args:
             device: wlan_device to initiate ping
@@ -399,17 +404,22 @@
                 ping_result = device.can_ping(dest_ip)
 
             if ping_result:
-                self.log.info('Ping successful from device %s to dest ip %s.' %
-                              (device.identifier, dest_ip))
+                self.log.info(
+                    "Ping successful from device %s to dest ip %s."
+                    % (device.identifier, dest_ip)
+                )
                 return True
             else:
                 self.log.debug(
-                    'Device %s could not ping dest ip %s. Retrying in 1 second.'
-                    % (device.identifier, dest_ip))
+                    "Device %s could not ping dest ip %s. Retrying in 1 second."
+                    % (device.identifier, dest_ip)
+                )
                 time.sleep(1)
         else:
-            self.log.info('Failed to ping from device %s to dest ip %s.' %
-                          (device.identifier, dest_ip))
+            self.log.info(
+                "Failed to ping from device %s to dest ip %s."
+                % (device.identifier, dest_ip)
+            )
             return False
 
     def run_iperf_traffic(self, ip_client, server_address, server_port=5201):
@@ -426,47 +436,59 @@
         ip_client_identifier = self.get_iperf_client_identifier(ip_client)
 
         self.log.info(
-            'Running traffic from iperf client %s to iperf server %s.' %
-            (ip_client_identifier, server_address))
+            "Running traffic from iperf client %s to iperf server %s."
+            % (ip_client_identifier, server_address)
+        )
         client_to_ap_path = ip_client.start(
-            server_address, '-i 1 -t 10 -J -p %s' % server_port,
-            'client_to_soft_ap')
+            server_address, "-i 1 -t 10 -J -p %s" % server_port, "client_to_soft_ap"
+        )
 
         client_to_ap_result = iperf_server.IPerfResult(client_to_ap_path)
-        if (not client_to_ap_result.avg_receive_rate):
+        if not client_to_ap_result.avg_receive_rate:
             raise ConnectionError(
-                'Failed to pass traffic from iperf client %s to iperf server %s.'
-                % (ip_client_identifier, server_address))
+                "Failed to pass traffic from iperf client %s to iperf server %s."
+                % (ip_client_identifier, server_address)
+            )
 
         self.log.info(
-            'Passed traffic from iperf client %s to iperf server %s with avg '
-            'rate of %s MB/s.' % (ip_client_identifier, server_address,
-                                  client_to_ap_result.avg_receive_rate))
+            "Passed traffic from iperf client %s to iperf server %s with avg "
+            "rate of %s MB/s."
+            % (
+                ip_client_identifier,
+                server_address,
+                client_to_ap_result.avg_receive_rate,
+            )
+        )
 
         self.log.info(
-            'Running traffic from iperf server %s to iperf client %s.' %
-            (server_address, ip_client_identifier))
+            "Running traffic from iperf server %s to iperf client %s."
+            % (server_address, ip_client_identifier)
+        )
         ap_to_client_path = ip_client.start(
-            server_address, '-i 1 -t 10 -R -J -p %s' % server_port,
-            'soft_ap_to_client')
+            server_address, "-i 1 -t 10 -R -J -p %s" % server_port, "soft_ap_to_client"
+        )
 
         ap_to_client_result = iperf_server.IPerfResult(ap_to_client_path)
-        if (not ap_to_client_result.avg_receive_rate):
+        if not ap_to_client_result.avg_receive_rate:
             raise ConnectionError(
-                'Failed to pass traffic from iperf server %s to iperf client %s.'
-                % (server_address, ip_client_identifier))
+                "Failed to pass traffic from iperf server %s to iperf client %s."
+                % (server_address, ip_client_identifier)
+            )
 
         self.log.info(
-            'Passed traffic from iperf server %s to iperf client %s with avg '
-            'rate of %s MB/s.' % (server_address, ip_client_identifier,
-                                  ap_to_client_result.avg_receive_rate))
+            "Passed traffic from iperf server %s to iperf client %s with avg "
+            "rate of %s MB/s."
+            % (
+                server_address,
+                ip_client_identifier,
+                ap_to_client_result.avg_receive_rate,
+            )
+        )
 
-    def run_iperf_traffic_parallel_process(self,
-                                           ip_client,
-                                           server_address,
-                                           error_queue,
-                                           server_port=5201):
-        """ Executes run_iperf_traffic using a queue to capture errors. Used
+    def run_iperf_traffic_parallel_process(
+        self, ip_client, server_address, error_queue, server_port=5201
+    ):
+        """Executes run_iperf_traffic using a queue to capture errors. Used
         when running iperf in a parallel process.
 
         Args:
@@ -476,16 +498,15 @@
             server_port: port of the iperf server
         """
         try:
-            self.run_iperf_traffic(ip_client,
-                                   server_address,
-                                   server_port=server_port)
+            self.run_iperf_traffic(ip_client, server_address, server_port=server_port)
         except ConnectionError as err:
-            error_queue.put('In iperf process from %s to %s: %s' %
-                            (self.get_iperf_client_identifier(ip_client),
-                             server_address, err))
+            error_queue.put(
+                "In iperf process from %s to %s: %s"
+                % (self.get_iperf_client_identifier(ip_client), server_address, err)
+            )
 
     def get_iperf_client_identifier(self, ip_client):
-        """ Retrieves an indentifer string from iperf client, for logging.
+        """Retrieves an indentifer string from iperf client, for logging.
 
         Args:
             ip_client: iperf client to grab identifier from
@@ -494,13 +515,10 @@
             return ip_client._android_device_or_serial.serial
         return ip_client._ssh_settings.hostname
 
-    def device_is_connected_to_ap(self,
-                                  client,
-                                  ap,
-                                  channel=None,
-                                  check_traffic=False,
-                                  timeout=DEFAULT_TIMEOUT):
-        """ Returns whether client device can ping (and optionally pass traffic)
+    def device_is_connected_to_ap(
+        self, client, ap, channel=None, check_traffic=False, timeout=DEFAULT_TIMEOUT
+    ):
+        """Returns whether client device can ping (and optionally pass traffic)
         to the ap device.
 
         Args:
@@ -517,40 +535,37 @@
         try:
             # Get interfaces
             client_interface = self.get_device_test_interface(
-                client, INTERFACE_ROLE_CLIENT)
+                client, INTERFACE_ROLE_CLIENT
+            )
             ap_interface = self.get_device_test_interface(
-                ap, role=INTERFACE_ROLE_AP, channel=channel)
+                ap, role=INTERFACE_ROLE_AP, channel=channel
+            )
 
             # Get addresses
-            client_ipv4 = self.wait_for_ipv4_address(client,
-                                                     client_interface,
-                                                     timeout=timeout)
-            ap_ipv4 = self.wait_for_ipv4_address(ap,
-                                                 ap_interface,
-                                                 timeout=timeout)
+            client_ipv4 = self.wait_for_ipv4_address(
+                client, client_interface, timeout=timeout
+            )
+            ap_ipv4 = self.wait_for_ipv4_address(ap, ap_interface, timeout=timeout)
         except ConnectionError as err:
-            self.log.error(
-                'Failed to retrieve interfaces and addresses. Err: %s' % err)
+            self.log.error("Failed to retrieve interfaces and addresses. Err: %s" % err)
             return False
 
         if not self.device_can_ping_addr(client, ap_ipv4):
-            self.log.error('Failed to ping from client to ap.')
+            self.log.error("Failed to ping from client to ap.")
             return False
 
         if not self.device_can_ping_addr(ap, client_ipv4):
-            self.log.error('Failed to ping from ap to client.')
+            self.log.error("Failed to ping from ap to client.")
             return False
 
         if check_traffic:
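+            # The iperf client always runs on the non-DUT device, targeting the
+            # address of the opposite endpoint as the iperf server address.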
             try:
                 if client is self.dut:
-                    self.run_iperf_traffic(self.iperf_clients_map[ap],
-                                           client_ipv4)
+                    self.run_iperf_traffic(self.iperf_clients_map[ap], client_ipv4)
                 else:
-                    self.run_iperf_traffic(self.iperf_clients_map[client],
-                                           ap_ipv4)
+                    self.run_iperf_traffic(self.iperf_clients_map[client], ap_ipv4)
             except ConnectionError as err:
-                self.log.error('Failed to run traffic between DUT and AP.')
+                self.log.error("Failed to run traffic between DUT and AP.")
                 return False
         return True
 
@@ -567,12 +582,11 @@
             with utils.SuppressLogOutput():
                 try:
                     return not self.device_is_connected_to_ap(
-                        client,
-                        self.dut,
-                        timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT)
+                        client, self.dut, timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT
+                    )
                 # Allow a failed to find ap interface error
                 except LookupError as err:
-                    self.log.debug('Hit expected LookupError: %s' % err)
+                    self.log.debug("Hit expected LookupError: %s" % err)
                     return True
 
     def verify_client_mode_connectivity_from_state(self, state, channel):
@@ -583,9 +597,9 @@
             channel: int, channel of the APs network
         """
         if state == STATE_UP:
-            return self.device_is_connected_to_ap(self.dut,
-                                                  self.access_point,
-                                                  channel=channel)
+            return self.device_is_connected_to_ap(
+                self.dut, self.access_point, channel=channel
+            )
         else:
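+            # STATE_DOWN: expect the connectivity check to fail quickly (short
+            # no-address timeout) and suppress the resulting error logs.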
             with utils.SuppressLogOutput():
                 try:
@@ -593,32 +607,33 @@
                         self.dut,
                         self.access_point,
                         channel=channel,
-                        timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT)
+                        timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT,
+                    )
                 # Allow a failed to find client interface error
                 except LookupError as err:
-                    self.log.debug('Hit expected LookupError: %s' % err)
+                    self.log.debug("Hit expected LookupError: %s" % err)
                     return True
 
-# Test Types
+    # Test Types
 
     def verify_soft_ap_associate_only(self, client, soft_ap_settings):
         if not self.associate_with_soft_ap(client, soft_ap_settings):
-            asserts.fail('Failed to associate client with SoftAP.')
+            asserts.fail("Failed to associate client with SoftAP.")
 
     def verify_soft_ap_associate_and_ping(self, client, soft_ap_settings):
         self.verify_soft_ap_associate_only(client, soft_ap_settings)
         if not self.device_is_connected_to_ap(client, self.dut):
-            asserts.fail('Client and SoftAP could not ping eachother.')
+            asserts.fail("Client and SoftAP could not ping eachother.")
 
     def verify_soft_ap_associate_and_pass_traffic(self, client, settings):
         self.verify_soft_ap_associate_only(client, settings)
-        if not self.device_is_connected_to_ap(
-                client, self.dut, check_traffic=True):
+        if not self.device_is_connected_to_ap(client, self.dut, check_traffic=True):
             asserts.fail(
-                'Client and SoftAP not responding to pings and passing traffic '
-                'as expected.')
+                "Client and SoftAP not responding to pings and passing traffic "
+                "as expected."
+            )
 
-# Runners for Generated Test Cases
+    # Runners for Generated Test Cases
 
     def run_soft_ap_association_stress_test(self, settings):
         """Sets up a SoftAP, and repeatedly associates and disassociates a
@@ -628,64 +643,68 @@
             settings: test configuration settings, see
                 test_soft_ap_association_stress for details
         """
-        client = settings['client']
-        soft_ap_params = settings['soft_ap_params']
-        test_type = settings['test_type']
+        client = settings["client"]
+        soft_ap_params = settings["soft_ap_params"]
+        test_type = settings["test_type"]
         if not test_type in TEST_TYPES:
-            raise ValueError('Unrecognized test type %s' % test_type)
-        iterations = settings['iterations']
+            raise ValueError("Unrecognized test type %s" % test_type)
+        iterations = settings["iterations"]
         self.log.info(
-            'Running association stress test type %s in iteration %s times' %
-            (test_type, iterations))
+            "Running association stress test type %s in iteration %s times"
+            % (test_type, iterations)
+        )
 
         self.start_soft_ap(soft_ap_params)
 
         passed_count = 0
         for run in range(iterations):
             try:
-                self.log.info('Starting SoftAp association run %s' %
-                              str(run + 1))
+                self.log.info("Starting SoftAp association run %s" % str(run + 1))
 
                 if test_type == TEST_TYPE_ASSOCIATE_ONLY:
                     self.verify_soft_ap_associate_only(client, soft_ap_params)
 
                 elif test_type == TEST_TYPE_ASSOCIATE_AND_PING:
-                    self.verify_soft_ap_associate_and_ping(
-                        client, soft_ap_params)
+                    self.verify_soft_ap_associate_and_ping(client, soft_ap_params)
 
                 elif test_type == TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC:
                     self.verify_soft_ap_associate_and_pass_traffic(
-                        client, soft_ap_params)
+                        client, soft_ap_params
+                    )
 
                 else:
-                    raise AttributeError('Invalid test type: %s' % test_type)
+                    raise AttributeError("Invalid test type: %s" % test_type)
 
             except signals.TestFailure as err:
                 self.log.error(
-                    'SoftAp association stress run %s failed. Err: %s' %
-                    (str(run + 1), err.details))
+                    "SoftAp association stress run %s failed. Err: %s"
+                    % (str(run + 1), err.details)
+                )
             else:
-                self.log.info('SoftAp association stress run %s successful.' %
-                              str(run + 1))
+                self.log.info(
+                    "SoftAp association stress run %s successful." % str(run + 1)
+                )
                 passed_count += 1
 
         if passed_count < iterations:
             asserts.fail(
-                'SoftAp association stress test passed on %s/%s runs.' %
-                (passed_count, iterations))
+                "SoftAp association stress test passed on %s/%s runs."
+                % (passed_count, iterations)
+            )
 
         asserts.explicit_pass(
-            'SoftAp association stress test passed on %s/%s runs.' %
-            (passed_count, iterations))
+            "SoftAp association stress test passed on %s/%s runs."
+            % (passed_count, iterations)
+        )
 
-# Alternate SoftAP and Client mode test
+    # Alternate SoftAP and Client mode test
 
     def run_soft_ap_and_client_mode_alternating_test(self, settings):
         """Runs a single soft_ap and client alternating stress test.
 
         See test_soft_ap_and_client_mode_alternating_stress for details.
         """
-        iterations = settings['iterations']
+        iterations = settings["iterations"]
         pass_count = 0
         current_soft_ap_state = STATE_DOWN
         current_client_mode_state = STATE_DOWN
@@ -698,11 +717,12 @@
             # to occur, exit early.
             for _ in range(2):
                 (current_soft_ap_state, err) = self.run_toggle_iteration_func(
-                    self.soft_ap_toggle_test_iteration, settings,
-                    current_soft_ap_state)
+                    self.soft_ap_toggle_test_iteration, settings, current_soft_ap_state
+                )
                 if err:
-                    self.log.error('Iteration %s failed. Err: %s' %
-                                   (str(iteration + 1), err))
+                    self.log.error(
+                        "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
+                    )
                     passes = False
                 if current_soft_ap_state == STATE_DOWN:
                     break
@@ -710,13 +730,15 @@
             # Attempt to toggle Client mode on, then off. If the first toggle
             # fails to occur, exit early.
             for _ in range(2):
-                (current_client_mode_state,
-                 err) = self.run_toggle_iteration_func(
-                     self.client_mode_toggle_test_iteration, settings,
-                     current_client_mode_state)
+                (current_client_mode_state, err) = self.run_toggle_iteration_func(
+                    self.client_mode_toggle_test_iteration,
+                    settings,
+                    current_client_mode_state,
+                )
                 if err:
-                    self.log.error('Iteration %s failed. Err: %s' %
-                                   (str(iteration + 1), err))
+                    self.log.error(
+                        "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
+                    )
                     passes = False
                 if current_client_mode_state == STATE_DOWN:
                     break
@@ -726,14 +748,16 @@
 
         if pass_count == iterations:
             asserts.explicit_pass(
-                'Toggle SoftAP and client mode stress test passed %s/%s times.'
-                % (pass_count, iterations))
+                "Toggle SoftAP and client mode stress test passed %s/%s times."
+                % (pass_count, iterations)
+            )
         else:
             asserts.fail(
-                'Toggle SoftAP and client mode stress test only passed %s/%s '
-                'times.' % (pass_count, iterations))
+                "Toggle SoftAP and client mode stress test only passed %s/%s "
+                "times." % (pass_count, iterations)
+            )
 
-# Toggle Stress Test Helper Functions
+    # Toggle Stress Test Helper Functions
 
     def run_toggle_stress_test(self, settings):
         """Runner function for toggle stress tests.
@@ -748,30 +772,33 @@
             PASS: if all iterations of the test function pass
             FAIL: if any iteration of the test function fails
         """
-        test_runner_func = settings['test_runner_func']
-        pre_test_func = settings.get('pre_test_func', None)
-        iterations = settings['iterations']
+        test_runner_func = settings["test_runner_func"]
+        pre_test_func = settings.get("pre_test_func", None)
+        iterations = settings["iterations"]
         if pre_test_func:
             pre_test_func(settings)
 
         pass_count = 0
         current_state = STATE_DOWN
         for iteration in range(iterations):
-            (current_state,
-             err) = self.run_toggle_iteration_func(test_runner_func, settings,
-                                                   current_state)
+            (current_state, err) = self.run_toggle_iteration_func(
+                test_runner_func, settings, current_state
+            )
             if err:
-                self.log.error('Iteration %s failed. Err: %s' %
-                               (str(iteration + 1), err))
+                self.log.error(
+                    "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
+                )
             else:
                 pass_count += 1
 
         if pass_count == iterations:
-            asserts.explicit_pass('Stress test passed %s/%s times.' %
-                                  (pass_count, iterations))
+            asserts.explicit_pass(
+                "Stress test passed %s/%s times." % (pass_count, iterations)
+            )
         else:
-            asserts.fail('Stress test only passed %s/%s '
-                         'times.' % (pass_count, iterations))
+            asserts.fail(
+                "Stress test only passed %s/%s " "times." % (pass_count, iterations)
+            )
 
     def run_toggle_iteration_func(self, func, settings, current_state):
         """Runs a toggle iteration function, updating the current state
@@ -806,7 +833,7 @@
         else:
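+            # Toggle succeeded: flip the recorded state (STATE_UP/STATE_DOWN act
+            # as booleans here) and report no error.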
             return (not current_state, None)
 
-# Stress Test Toggle Functions
+    # Stress Test Toggle Functions
 
     def start_soft_ap_and_verify_connected(self, client, soft_ap_params):
         """Sets up SoftAP, associates a client, then verifies connection.
@@ -820,19 +847,21 @@
             is not functioning as expected
         """
         # Change SSID every time, to avoid client connection issues.
-        soft_ap_params['ssid'] = utils.rand_ascii_str(
-            hostapd_constants.AP_SSID_LENGTH_2G)
+        soft_ap_params["ssid"] = utils.rand_ascii_str(
+            hostapd_constants.AP_SSID_LENGTH_2G
+        )
         self.start_soft_ap(soft_ap_params)
         associated = self.associate_with_soft_ap(client, soft_ap_params)
         if not associated:
             raise StressTestIterationFailure(
-                'Failed to associated client to DUT SoftAP. '
-                'Continuing with iterations.')
+                "Failed to associated client to DUT SoftAP. "
+                "Continuing with iterations."
+            )
 
         if not self.verify_soft_ap_connectivity_from_state(STATE_UP, client):
             raise StressTestIterationFailure(
-                'Failed to ping between client and DUT. Continuing '
-                'with iterations.')
+                "Failed to ping between client and DUT. Continuing " "with iterations."
+            )
 
     def stop_soft_ap_and_verify_disconnected(self, client, soft_ap_params):
         """Tears down SoftAP, and verifies connection is down.
@@ -844,13 +873,13 @@
         Raise:
             EnvironmentError, if client and AP can still communicate
         """
-        self.log.info('Stopping SoftAP on DUT.')
+        self.log.info("Stopping SoftAP on DUT.")
         self.stop_soft_ap(soft_ap_params)
 
         if not self.verify_soft_ap_connectivity_from_state(STATE_DOWN, client):
             raise EnvironmentError(
-                'Client can still ping DUT. Continuing with '
-                'iterations.')
+                "Client can still ping DUT. Continuing with " "iterations."
+            )
 
     def start_client_mode_and_verify_connected(self, ap_params):
         """Connects DUT to AP in client mode and verifies connection
@@ -863,31 +892,31 @@
             StressTestIterationFailure, if DUT associates but connection is not
                 functioning as expected.
         """
-        ap_ssid = ap_params['ssid']
-        ap_password = ap_params['password']
-        ap_channel = ap_params['channel']
-        ap_security = ap_params.get('security')
+        ap_ssid = ap_params["ssid"]
+        ap_password = ap_params["password"]
+        ap_channel = ap_params["channel"]
+        ap_security = ap_params.get("security")
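+        # The AP's security-mode string (None for an open network) is mapped to
+        # the DUT's target security type below before associating.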
 
         if ap_security:
             ap_security_mode = ap_security.security_mode_string
         else:
             ap_security_mode = None
 
-        self.log.info('Associating DUT with AP network: %s' % ap_ssid)
+        self.log.info("Associating DUT with AP network: %s" % ap_ssid)
         associated = self.dut.associate(
             target_ssid=ap_ssid,
             target_pwd=ap_password,
-            target_security=hostapd_constants.
-            SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                ap_security_mode, None))
+            target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                ap_security_mode, None
+            ),
+        )
         if not associated:
-            raise EnvironmentError('Failed to associate DUT in client mode.')
+            raise EnvironmentError("Failed to associate DUT in client mode.")
         else:
-            self.log.info('Association successful.')
+            self.log.info("Association successful.")
 
-        if not self.verify_client_mode_connectivity_from_state(
-                STATE_UP, ap_channel):
-            raise StressTestIterationFailure('Failed to ping AP from DUT.')
+        if not self.verify_client_mode_connectivity_from_state(STATE_UP, ap_channel):
+            raise StressTestIterationFailure("Failed to ping AP from DUT.")
 
     def stop_client_mode_and_verify_disconnected(self, ap_params):
         """Disconnects DUT from AP and verifies connection is down.
@@ -898,15 +927,16 @@
         Raises:
             EnvironmentError, if DUT and AP can still communicate
         """
-        self.log.info('Disconnecting DUT from AP.')
+        self.log.info("Disconnecting DUT from AP.")
         self.dut.disconnect()
         if not self.verify_client_mode_connectivity_from_state(
-                STATE_DOWN, ap_params['channel']):
-            raise EnvironmentError('DUT can still ping AP.')
+            STATE_DOWN, ap_params["channel"]
+        ):
+            raise EnvironmentError("DUT can still ping AP.")
 
-# Toggle Stress Test Iteration and Pre-Test Functions
+    # Toggle Stress Test Iteration and Pre-Test Functions
 
-# SoftAP Toggle Stress Test Helper Functions
+    # SoftAP Toggle Stress Test Helper Functions
 
     def soft_ap_toggle_test_iteration(self, settings, current_state):
         """Runs a single iteration of SoftAP toggle stress test
@@ -921,19 +951,18 @@
                 functioning correctly.
             EnvironmentError, if toggle fails to occur at all
         """
-        soft_ap_params = settings['soft_ap_params']
-        self.log.info('Toggling SoftAP %s.' %
-                      ('down' if current_state else 'up'))
+        soft_ap_params = settings["soft_ap_params"]
+        self.log.info("Toggling SoftAP %s." % ("down" if current_state else "up"))
 
         if current_state == STATE_DOWN:
-            self.start_soft_ap_and_verify_connected(self.primary_client,
-                                                    soft_ap_params)
+            self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
 
         else:
-            self.stop_soft_ap_and_verify_disconnected(self.primary_client,
-                                                      soft_ap_params)
+            self.stop_soft_ap_and_verify_disconnected(
+                self.primary_client, soft_ap_params
+            )
 
-# Client Mode Toggle Stress Test Helper Functions
+    # Client Mode Toggle Stress Test Helper Functions
 
     def client_mode_toggle_pre_test(self, settings):
         """Prepares the AP before client mode toggle tests
@@ -944,16 +973,15 @@
         Raises:
             ConnectionError, if AP setup fails
         """
-        ap_params = settings['ap_params']
-        ap_channel = ap_params['channel']
-        ap_profile = ap_params.pop('profile')
-        self.log.info('Setting up AP with params: %s' % ap_params)
-        setup_ap(access_point=self.access_point,
-                 profile_name=ap_profile,
-                 **ap_params)
+        ap_params = settings["ap_params"]
+        ap_channel = ap_params["channel"]
+        ap_profile = ap_params.pop("profile")
+        self.log.info("Setting up AP with params: %s" % ap_params)
+        setup_ap(access_point=self.access_point, profile_name=ap_profile, **ap_params)
         # Confirms AP assigned itself an address
-        ap_interface = self.get_device_test_interface(self.access_point,
-                                                      channel=ap_channel)
+        ap_interface = self.get_device_test_interface(
+            self.access_point, channel=ap_channel
+        )
         self.wait_for_ipv4_address(self.access_point, ap_interface)
 
     def client_mode_toggle_test_iteration(self, settings, current_state):
@@ -969,9 +997,8 @@
                 functioning correctly.
             EnvironmentError, if toggle fails to occur at all
         """
-        ap_params = settings['ap_params']
-        self.log.info('Toggling client mode %s' %
-                      ('off' if current_state else 'on'))
+        ap_params = settings["ap_params"]
+        self.log.info("Toggling client mode %s" % ("off" if current_state else "on"))
 
         if current_state == STATE_DOWN:
             self.start_client_mode_and_verify_connected(ap_params)
@@ -979,7 +1006,7 @@
         else:
             self.stop_client_mode_and_verify_disconnected(ap_params)
 
-# Toggle SoftAP with Client Mode Up Test Helper Functions
+    # Toggle SoftAP with Client Mode Up Test Helper Functions
 
     def soft_ap_toggle_with_client_mode_pre_test(self, settings):
         """Sets up and verifies client mode before SoftAP toggle test.
@@ -991,12 +1018,13 @@
         """
         self.client_mode_toggle_pre_test(settings)
         try:
-            self.start_client_mode_and_verify_connected(settings['ap_params'])
+            self.start_client_mode_and_verify_connected(settings["ap_params"])
         except StressTestIterationFailure as err:
             # This prevents it being treated as a routine error
             raise ConnectionError(
-                'Failed to set up DUT client mode before SoftAP toggle test.'
-                'Err: %s' % err)
+                "Failed to set up DUT client mode before SoftAP toggle test."
+                "Err: %s" % err
+            )
 
     def soft_ap_toggle_with_client_mode_iteration(
         self,
@@ -1015,15 +1043,17 @@
                 functioning correctly.
             EnvironmentError, if toggle fails to occur at all
         """
-        ap_params = settings['ap_params']
-        ap_channel = ap_params['channel']
+        ap_params = settings["ap_params"]
+        ap_channel = ap_params["channel"]
         self.soft_ap_toggle_test_iteration(settings, current_state)
         if not self.device_is_connected_to_ap(
-                self.dut, self.access_point, channel=ap_channel):
+            self.dut, self.access_point, channel=ap_channel
+        ):
             raise StressTestIterationFailure(
-                'DUT client mode is no longer functional after SoftAP toggle.')
+                "DUT client mode is no longer functional after SoftAP toggle."
+            )
 
-# Toggle Client Mode with SoftAP Up Test Helper Functions
+    # Toggle Client Mode with SoftAP Up Test Helper Functions
 
     def client_mode_toggle_with_soft_ap_pre_test(self, settings):
         """Sets up and verifies softap before client mode toggle test.
@@ -1035,16 +1065,16 @@
         """
         self.client_mode_toggle_pre_test(settings)
         try:
-            self.start_soft_ap_and_verify_connected(self.primary_client,
-                                                    settings['soft_ap_params'])
+            self.start_soft_ap_and_verify_connected(
+                self.primary_client, settings["soft_ap_params"]
+            )
         except StressTestIterationFailure as err:
             # This prevents it being treated as a routine error
             raise ConnectionError(
-                'Failed to set up SoftAP before client mode toggle test. Err: %s'
-                % err)
+                "Failed to set up SoftAP before client mode toggle test. Err: %s" % err
+            )
 
-    def client_mode_toggle_with_soft_ap_iteration(self, settings,
-                                                  current_state):
+    def client_mode_toggle_with_soft_ap_iteration(self, settings, current_state):
         """Runs single iteration of client mode toggle stress with SoftAP test.
 
         Args:
@@ -1060,9 +1090,10 @@
         self.client_mode_toggle_test_iteration(settings, current_state)
         if not self.device_is_connected_to_ap(self.primary_client, self.dut):
             raise StressTestIterationFailure(
-                'SoftAP is no longer functional after client mode toggle.')
+                "SoftAP is no longer functional after client mode toggle."
+            )
 
-# Toggle SoftAP and Client Mode Randomly
+    # Toggle SoftAP and Client Mode Randomly
 
     def run_soft_ap_and_client_mode_random_toggle_stress_test(self, settings):
         """Runner function for SoftAP and client mode random toggle tests.
@@ -1072,16 +1103,17 @@
         Args:
             settings: dict, containing test settings
         """
-        iterations = settings['iterations']
+        iterations = settings["iterations"]
         pass_count = 0
         current_soft_ap_state = STATE_DOWN
         current_client_mode_state = STATE_DOWN
-        ap_channel = settings['ap_params']['channel']
+        ap_channel = settings["ap_params"]["channel"]
 
         self.client_mode_toggle_pre_test(settings)
         for iteration in range(iterations):
-            self.log.info('Starting iteration %s out of %s.' %
-                          (str(iteration + 1), iterations))
+            self.log.info(
+                "Starting iteration %s out of %s." % (str(iteration + 1), iterations)
+            )
             passes = True
 
             # Randomly determine if softap, client mode, or both should
@@ -1089,28 +1121,33 @@
             rand_toggle_choice = random.randrange(0, 3)
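+            # 0: toggle SoftAP only, 1: toggle both, 2: toggle client mode only.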
             if rand_toggle_choice <= 1:
                 (current_soft_ap_state, err) = self.run_toggle_iteration_func(
-                    self.soft_ap_toggle_test_iteration, settings,
-                    current_soft_ap_state)
+                    self.soft_ap_toggle_test_iteration, settings, current_soft_ap_state
+                )
                 if err:
                     self.log.error(
-                        'Iteration %s failed toggling SoftAP. Err: %s' %
-                        (str(iteration + 1), err))
+                        "Iteration %s failed toggling SoftAP. Err: %s"
+                        % (str(iteration + 1), err)
+                    )
                     passes = False
             if rand_toggle_choice >= 1:
-                (current_client_mode_state,
-                 err) = self.run_toggle_iteration_func(
-                     self.client_mode_toggle_test_iteration, settings,
-                     current_client_mode_state)
+                (current_client_mode_state, err) = self.run_toggle_iteration_func(
+                    self.client_mode_toggle_test_iteration,
+                    settings,
+                    current_client_mode_state,
+                )
                 if err:
                     self.log.error(
-                        'Iteration %s failed toggling client mode. Err: %s' %
-                        (str(iteration + 1), err))
+                        "Iteration %s failed toggling client mode. Err: %s"
+                        % (str(iteration + 1), err)
+                    )
                     passes = False
 
             soft_ap_verified = self.verify_soft_ap_connectivity_from_state(
-                current_soft_ap_state, self.primary_client)
+                current_soft_ap_state, self.primary_client
+            )
             client_mode_verified = self.verify_client_mode_connectivity_from_state(
-                current_client_mode_state, ap_channel)
+                current_client_mode_state, ap_channel
+            )
 
             if not soft_ap_verified or not client_mode_verified:
                 passes = False
@@ -1118,367 +1155,399 @@
                 pass_count += 1
 
         if pass_count == iterations:
-            asserts.explicit_pass('Stress test passed %s/%s times.' %
-                                  (pass_count, iterations))
+            asserts.explicit_pass(
+                "Stress test passed %s/%s times." % (pass_count, iterations)
+            )
         else:
-            asserts.fail('Stress test only passed %s/%s '
-                         'times.' % (pass_count, iterations))
+            asserts.fail(
+                "Stress test only passed %s/%s " "times." % (pass_count, iterations)
+            )
 
-
-# Test Cases
+    # Test Cases
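+    #
+    # Each test below brings up a SoftAP with a specific band, security type, and
+    # connectivity mode, then verifies that the primary client can associate and
+    # pass traffic.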
 
     def test_soft_ap_2g_open_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_OPEN,
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_open_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_OPEN,
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_open_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_OPEN,
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wep_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WEP,
+            "password": generate_random_password(security_mode=SECURITY_WEP),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wep_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WEP,
+            "password": generate_random_password(security_mode=SECURITY_WEP),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wep_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WEP,
+            "password": generate_random_password(security_mode=SECURITY_WEP),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client, )
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wpa_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WPA,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wpa_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wpa_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wpa2_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WPA2,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wpa2_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA2,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wpa2_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA2,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wpa3_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WPA3,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wpa3_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA3,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wpa3_local(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA3,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_open_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_OPEN,
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_open_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_OPEN,
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_open_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_OPEN,
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wep_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WEP,
+            "password": generate_random_password(security_mode=SECURITY_WEP),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wep_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WEP,
+            "password": generate_random_password(security_mode=SECURITY_WEP),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wep_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WEP,
+            "password": generate_random_password(security_mode=SECURITY_WEP),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wpa_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WPA,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wpa_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wpa_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wpa2_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WPA2,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wpa2_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA2,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_5G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wpa2_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA2,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_2g_wpa3_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
+            "security_type": SECURITY_WPA3,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_2G,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_5g_wpa3_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA3,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_soft_ap_any_wpa3_unrestricted(self):
         soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
+            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
+            "security_type": SECURITY_WPA3,
+            "password": generate_random_password(),
+            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
+            "operating_band": OPERATING_BAND_ANY,
         }
         self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
+        self.verify_soft_ap_associate_and_pass_traffic(
+            self.primary_client, soft_ap_params
+        )
 
     def test_multi_client(self):
         """Tests multi-client association with a single soft AP network.
@@ -1501,13 +1570,12 @@
             }
         }
         """
-        asserts.skip_if(
-            len(self.clients) < 2, 'Test requires at least 2 SoftAPClients')
+        asserts.skip_if(len(self.clients) < 2, "Test requires at least 2 SoftAPClients")
 
-        test_params = self.soft_ap_test_params.get('multi_client_test_params',
-                                                   {})
+        test_params = self.soft_ap_test_params.get("multi_client_test_params", {})
         soft_ap_params = get_soft_ap_params_from_config_or_default(
-            test_params.get('soft_ap_params', {}))
+            test_params.get("soft_ap_params", {})
+        )
 
         self.start_soft_ap(soft_ap_params)
 
@@ -1520,63 +1588,67 @@
             # Verify previously associated clients still behave as expected
             for associated_client in associated:
                 self.log.info(
-                    'Verifying previously associated client %s still functions correctly.'
-                    % associated_client['device'].identifier)
+                    "Verifying previously associated client %s still functions correctly."
+                    % associated_client["device"].identifier
+                )
                 if not self.device_is_connected_to_ap(
-                        associated_client['device'], self.dut,
-                        check_traffic=True):
+                    associated_client["device"], self.dut, check_traffic=True
+                ):
                     asserts.fail(
-                        'Previously associated client %s failed checks after '
-                        'client %s associated.' %
-                        (associated_client['device'].identifier,
-                         client.identifier))
+                        "Previously associated client %s failed checks after "
+                        "client %s associated."
+                        % (associated_client["device"].identifier, client.identifier)
+                    )
 
             client_interface = self.get_device_test_interface(client)
             client_ipv4 = self.wait_for_ipv4_address(client, client_interface)
             associated.append({"device": client, "address": client_ipv4})
 
-        self.log.info('All devices successfully associated.')
+        self.log.info("All devices successfully associated.")
 
-        self.log.info('Verifying all associated clients can ping eachother.')
+        self.log.info("Verifying all associated clients can ping eachother.")
         for transmitter in associated:
             for receiver in associated:
                 if transmitter != receiver:
-                    if not transmitter['device'].can_ping(receiver['address']):
+                    if not transmitter["device"].can_ping(receiver["address"]):
                         asserts.fail(
-                            'Could not ping from one associated client (%s) to another (%s).'
-                            % (transmitter['address'], receiver['address']))
+                            "Could not ping from one associated client (%s) to another (%s)."
+                            % (transmitter["address"], receiver["address"])
+                        )
                     else:
                         self.log.info(
-                            'Successfully pinged from associated client (%s) to another (%s)'
-                            % (transmitter['address'], receiver['address']))
+                            "Successfully pinged from associated client (%s) to another (%s)"
+                            % (transmitter["address"], receiver["address"])
+                        )
 
         self.log.info(
-            'All associated clients can ping eachother. Beginning disassociations.'
+            "All associated clients can ping eachother. Beginning disassociations."
         )
 
         while len(associated) > 0:
             # Disassociate client
-            client = associated.pop()['device']
+            client = associated.pop()["device"]
             self.disconnect_from_soft_ap(client)
 
             # Verify still connected clients still behave as expected
             for associated_client in associated:
                 self.log.info(
-                    'Verifying still associated client %s still functions '
-                    'correctly.' % associated_client['device'].identifier)
+                    "Verifying still associated client %s still functions "
+                    "correctly." % associated_client["device"].identifier
+                )
                 if not self.device_is_connected_to_ap(
-                        associated_client['device'], self.dut,
-                        check_traffic=True):
+                    associated_client["device"], self.dut, check_traffic=True
+                ):
                     asserts.fail(
-                        'Previously associated client %s failed checks after'
-                        ' client %s disassociated.' %
-                        (associated_client['device'].identifier,
-                         client.identifier))
+                        "Previously associated client %s failed checks after"
+                        " client %s disassociated."
+                        % (associated_client["device"].identifier, client.identifier)
+                    )
 
-        self.log.info('All disassociations occurred smoothly.')
+        self.log.info("All disassociations occurred smoothly.")
 
     def test_simultaneous_soft_ap_and_client(self):
-        """ Tests FuchsiaDevice DUT can act as a client and a SoftAP
+        """Tests FuchsiaDevice DUT can act as a client and a SoftAP
         simultaneously.
 
         Raises:
@@ -1585,53 +1657,54 @@
             TestFailure: if DUT fails to pass traffic as either a client or an
                 AP
         """
-        asserts.skip_if(not self.access_point, 'No access point provided.')
+        asserts.skip_if(not self.access_point, "No access point provided.")
 
-        self.log.info('Setting up AP using hostapd.')
-        test_params = self.soft_ap_test_params.get(
-            'soft_ap_and_client_test_params', {})
+        self.log.info("Setting up AP using hostapd.")
+        test_params = self.soft_ap_test_params.get("soft_ap_and_client_test_params", {})
 
         # Configure AP
         ap_params = get_ap_params_from_config_or_default(
-            test_params.get('ap_params', {}))
+            test_params.get("ap_params", {})
+        )
 
         # Setup AP and associate DUT
-        ap_profile = ap_params.pop('profile')
-        setup_ap(access_point=self.access_point,
-                 profile_name=ap_profile,
-                 **ap_params)
+        ap_profile = ap_params.pop("profile")
+        setup_ap(access_point=self.access_point, profile_name=ap_profile, **ap_params)
         try:
             self.start_client_mode_and_verify_connected(ap_params)
         except Exception as err:
-            asserts.fail('Failed to set up client mode. Err: %s' % err)
+            asserts.fail("Failed to set up client mode. Err: %s" % err)
 
         # Setup SoftAP
         soft_ap_params = get_soft_ap_params_from_config_or_default(
-            test_params.get('soft_ap_params', {}))
-        self.start_soft_ap_and_verify_connected(self.primary_client,
-                                                soft_ap_params)
+            test_params.get("soft_ap_params", {})
+        )
+        self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
 
         # Get FuchsiaDevice test interfaces
         dut_ap_interface = self.get_device_test_interface(
-            self.dut, role=INTERFACE_ROLE_AP)
+            self.dut, role=INTERFACE_ROLE_AP
+        )
         dut_client_interface = self.get_device_test_interface(
-            self.dut, role=INTERFACE_ROLE_CLIENT)
+            self.dut, role=INTERFACE_ROLE_CLIENT
+        )
 
         # Get FuchsiaDevice addresses
         dut_ap_ipv4 = self.wait_for_ipv4_address(self.dut, dut_ap_interface)
-        dut_client_ipv4 = self.wait_for_ipv4_address(self.dut,
-                                                     dut_client_interface)
+        dut_client_ipv4 = self.wait_for_ipv4_address(self.dut, dut_client_interface)
 
         # Set up secondary iperf server of FuchsiaDevice
-        self.log.info('Setting up second iperf server on FuchsiaDevice DUT.')
+        self.log.info("Setting up second iperf server on FuchsiaDevice DUT.")
         secondary_iperf_server = iperf_server.IPerfServerOverSsh(
-            self.iperf_server_config, DEFAULT_IPERF_PORT + 1, use_killall=True)
+            self.iperf_server_settings, DEFAULT_IPERF_PORT + 1, use_killall=True
+        )
         secondary_iperf_server.start()
 
         # Set up iperf client on AP
-        self.log.info('Setting up iperf client on AP.')
+        self.log.info("Setting up iperf client on AP.")
         ap_iperf_client = iperf_client.IPerfClientOverSsh(
-            self.user_params['AccessPoint'][0]['ssh_config'])
+            self.access_point.ssh_settings
+        )
 
         # Setup iperf processes:
         #     Primary client <-> SoftAP interface on FuchsiaDevice
@@ -1640,18 +1713,23 @@
         iperf_soft_ap = mp.Process(
             target=self.run_iperf_traffic_parallel_process,
             args=[
-                self.iperf_clients_map[self.primary_client], dut_ap_ipv4,
-                process_errors
-            ])
+                self.iperf_clients_map[self.primary_client],
+                dut_ap_ipv4,
+                process_errors,
+            ],
+        )
 
         iperf_fuchsia_client = mp.Process(
             target=self.run_iperf_traffic_parallel_process,
             args=[ap_iperf_client, dut_client_ipv4, process_errors],
-            kwargs={'server_port': 5202})
+            kwargs={"server_port": 5202},
+        )
 
         # Run iperf processes simultaneously
-        self.log.info('Running simultaneous iperf traffic: between AP and DUT '
-                      'client interface, and DUT AP interface and client.')
+        self.log.info(
+            "Running simultaneous iperf traffic: between AP and DUT "
+            "client interface, and DUT AP interface and client."
+        )
 
         iperf_soft_ap.start()
         iperf_fuchsia_client.start()
@@ -1662,7 +1740,7 @@
             if proc.is_alive():
                 proc.terminate()
                 proc.join()
-                raise RuntimeError('Failed to join process %s' % proc)
+                raise RuntimeError("Failed to join process %s" % proc)
 
         # Stop iperf server (also stopped in teardown class as failsafe)
         secondary_iperf_server.stop()
@@ -1670,18 +1748,19 @@
         # Check errors from parallel processes
         if process_errors.empty():
             asserts.explicit_pass(
-                'FuchsiaDevice was successfully able to pass traffic as a '
-                'client and an AP simultaneously.')
+                "FuchsiaDevice was successfully able to pass traffic as a "
+                "client and an AP simultaneously."
+            )
         else:
             while not process_errors.empty():
-                self.log.error('Error in iperf process: %s' %
-                               process_errors.get())
+                self.log.error("Error in iperf process: %s" % process_errors.get())
             asserts.fail(
-                'FuchsiaDevice failed to pass traffic as a client and an AP '
-                'simultaneously.')
+                "FuchsiaDevice failed to pass traffic as a client and an AP "
+                "simultaneously."
+            )
 
     def test_soft_ap_association_stress(self):
-        """ Sets up a single AP and repeatedly associate/disassociate
+        """Sets up a single AP and repeatedly associate/disassociate
         a client, verifying connection every time
 
         Each test creates 1 SoftAP and repeatedly associates/disassociates
@@ -1702,40 +1781,39 @@
         }
         """
         tests = self.soft_ap_test_params.get(
-            'test_soft_ap_association_stress',
-            [dict(test_name='test_soft_ap_association_stress_default')])
+            "test_soft_ap_association_stress",
+            [dict(test_name="test_soft_ap_association_stress_default")],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            test_type = config_settings.get('test_type',
-                                            'associate_and_pass_traffic')
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("soft_ap_params", {})
+            )
+            test_type = config_settings.get("test_type", "associate_and_pass_traffic")
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_association_stress_%s_iterations' %
-                    iterations),
-                'client':
-                self.primary_client,
-                'soft_ap_params':
-                soft_ap_params,
-                'test_type':
-                test_type,
-                'iterations':
-                iterations
+                "test_name": config_settings.get(
+                    "test_name",
+                    "test_soft_ap_association_stress_%s_iterations" % iterations,
+                ),
+                "client": self.primary_client,
+                "soft_ap_params": soft_ap_params,
+                "test_type": test_type,
+                "iterations": iterations,
             }
             test_settings_list.append(test_settings)
 
-        self.run_generated_testcases(self.run_soft_ap_association_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
+        self.run_generated_testcases(
+            self.run_soft_ap_association_stress_test,
+            test_settings_list,
+            name_func=get_test_name_from_settings,
+        )
 
     def test_soft_ap_and_client_mode_alternating_stress(self):
-        """ Runs tests that alternate between SoftAP and Client modes.
+        """Runs tests that alternate between SoftAP and Client modes.
 
         Each test sets up an AP. Then, for each iteration:
             - DUT starts up SoftAP, client associates with SoftAP,
@@ -1766,44 +1844,44 @@
             ]
         }
         """
-        asserts.skip_if(not self.access_point, 'No access point provided.')
+        asserts.skip_if(not self.access_point, "No access point provided.")
         tests = self.soft_ap_test_params.get(
-            'test_soft_ap_and_client_mode_alternating_stress', [
-                dict(test_name=
-                     'test_soft_ap_and_client_mode_alternating_stress_default')
-            ])
+            "test_soft_ap_and_client_mode_alternating_stress",
+            [dict(test_name="test_soft_ap_and_client_mode_alternating_stress_default")],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
+                config_settings.get("ap_params", {})
+            )
             soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("soft_ap_params", {})
+            )
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
 
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_and_client_mode_alternating_stress_%s_iterations'
-                    % iterations),
-                'iterations':
-                iterations,
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
+                "test_name": config_settings.get(
+                    "test_name",
+                    "test_soft_ap_and_client_mode_alternating_stress_%s_iterations"
+                    % iterations,
+                ),
+                "iterations": iterations,
+                "soft_ap_params": soft_ap_params,
+                "ap_params": ap_params,
             }
 
             test_settings_list.append(test_settings)
         self.run_generated_testcases(
             test_func=self.run_soft_ap_and_client_mode_alternating_test,
             settings=test_settings_list,
-            name_func=get_test_name_from_settings)
+            name_func=get_test_name_from_settings,
+        )
 
     def test_soft_ap_toggle_stress(self):
-        """ Runs SoftAP toggling stress test.
+        """Runs SoftAP toggling stress test.
 
         Each iteration toggles SoftAP to the opposite state (up or down).
 
@@ -1827,35 +1905,36 @@
         }
         """
         tests = self.soft_ap_test_params.get(
-            'test_soft_ap_toggle_stress',
-            [dict(test_name='test_soft_ap_toggle_stress_default')])
+            "test_soft_ap_toggle_stress",
+            [dict(test_name="test_soft_ap_toggle_stress_default")],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("soft_ap_params", {})
+            )
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_toggle_stress_%s_iterations' % iterations),
-                'test_runner_func':
-                self.soft_ap_toggle_test_iteration,
-                'soft_ap_params':
-                soft_ap_params,
-                'iterations':
-                iterations
+                "test_name": config_settings.get(
+                    "test_name", "test_soft_ap_toggle_stress_%s_iterations" % iterations
+                ),
+                "test_runner_func": self.soft_ap_toggle_test_iteration,
+                "soft_ap_params": soft_ap_params,
+                "iterations": iterations,
             }
             test_settings_list.append(test_settings)
 
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
+        self.run_generated_testcases(
+            self.run_toggle_stress_test,
+            test_settings_list,
+            name_func=get_test_name_from_settings,
+        )
 
     def test_client_mode_toggle_stress(self):
-        """ Runs client mode toggling stress test.
+        """Runs client mode toggling stress test.
 
         Each iteration toggles client mode to the opposite state (up or down).
 
@@ -1878,152 +1957,156 @@
             ]
         }
         """
-        asserts.skip_if(not self.access_point, 'No access point provided.')
+        asserts.skip_if(not self.access_point, "No access point provided.")
         tests = self.soft_ap_test_params.get(
-            'test_client_mode_toggle_stress',
-            [dict(test_name='test_client_mode_toggle_stress_default')])
+            "test_client_mode_toggle_stress",
+            [dict(test_name="test_client_mode_toggle_stress_default")],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("ap_params", {})
+            )
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_client_mode_toggle_stress_%s_iterations' %
-                    iterations),
-                'test_runner_func':
-                self.client_mode_toggle_test_iteration,
-                'pre_test_func':
-                self.client_mode_toggle_pre_test,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
+                "test_name": config_settings.get(
+                    "test_name",
+                    "test_client_mode_toggle_stress_%s_iterations" % iterations,
+                ),
+                "test_runner_func": self.client_mode_toggle_test_iteration,
+                "pre_test_func": self.client_mode_toggle_pre_test,
+                "ap_params": ap_params,
+                "iterations": iterations,
             }
             test_settings_list.append(test_settings)
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
+        self.run_generated_testcases(
+            self.run_toggle_stress_test,
+            test_settings_list,
+            name_func=get_test_name_from_settings,
+        )
 
     def test_soft_ap_toggle_stress_with_client_mode(self):
         """Same as test_soft_ap_toggle_stress, but client mode is set up
         at test start and verified after every toggle."""
-        asserts.skip_if(not self.access_point, 'No access point provided.')
+        asserts.skip_if(not self.access_point, "No access point provided.")
         tests = self.soft_ap_test_params.get(
-            'test_soft_ap_toggle_stress_with_client_mode', [
-                dict(test_name=
-                     'test_soft_ap_toggle_stress_with_client_mode_default')
-            ])
+            "test_soft_ap_toggle_stress_with_client_mode",
+            [dict(test_name="test_soft_ap_toggle_stress_with_client_mode_default")],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
+                config_settings.get("soft_ap_params", {})
+            )
             ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("ap_params", {})
+            )
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_toggle_stress_with_client_mode_%s_iterations'
-                    % iterations),
-                'test_runner_func':
-                self.soft_ap_toggle_with_client_mode_iteration,
-                'pre_test_func':
-                self.soft_ap_toggle_with_client_mode_pre_test,
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
+                "test_name": config_settings.get(
+                    "test_name",
+                    "test_soft_ap_toggle_stress_with_client_mode_%s_iterations"
+                    % iterations,
+                ),
+                "test_runner_func": self.soft_ap_toggle_with_client_mode_iteration,
+                "pre_test_func": self.soft_ap_toggle_with_client_mode_pre_test,
+                "soft_ap_params": soft_ap_params,
+                "ap_params": ap_params,
+                "iterations": iterations,
             }
             test_settings_list.append(test_settings)
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
+        self.run_generated_testcases(
+            self.run_toggle_stress_test,
+            test_settings_list,
+            name_func=get_test_name_from_settings,
+        )
 
     def test_client_mode_toggle_stress_with_soft_ap(self):
         """Same as test_client_mode_toggle_stress, but softap is set up at
         test start and verified after every toggle."""
-        asserts.skip_if(not self.access_point, 'No access point provided.')
+        asserts.skip_if(not self.access_point, "No access point provided.")
         tests = self.soft_ap_test_params.get(
-            'test_client_mode_toggle_stress_with_soft_ap', [
-                dict(test_name=
-                     'test_client_mode_toggle_stress_with_soft_ap_default')
-            ])
+            "test_client_mode_toggle_stress_with_soft_ap",
+            [dict(test_name="test_client_mode_toggle_stress_with_soft_ap_default")],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
+                config_settings.get("soft_ap_params", {})
+            )
             ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("ap_params", {})
+            )
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_client_mode_toggle_stress_with_soft_ap_%s_iterations'
-                    % iterations),
-                'test_runner_func':
-                self.client_mode_toggle_with_soft_ap_iteration,
-                'pre_test_func':
-                self.client_mode_toggle_with_soft_ap_pre_test,
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
+                "test_name": config_settings.get(
+                    "test_name",
+                    "test_client_mode_toggle_stress_with_soft_ap_%s_iterations"
+                    % iterations,
+                ),
+                "test_runner_func": self.client_mode_toggle_with_soft_ap_iteration,
+                "pre_test_func": self.client_mode_toggle_with_soft_ap_pre_test,
+                "soft_ap_params": soft_ap_params,
+                "ap_params": ap_params,
+                "iterations": iterations,
             }
             test_settings_list.append(test_settings)
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
+        self.run_generated_testcases(
+            self.run_toggle_stress_test,
+            test_settings_list,
+            name_func=get_test_name_from_settings,
+        )
 
     def test_soft_ap_and_client_mode_random_toggle_stress(self):
         """Same as above toggle stres tests, but each iteration, either softap,
         client mode, or both are toggled, then states are verified."""
-        asserts.skip_if(not self.access_point, 'No access point provided.')
+        asserts.skip_if(not self.access_point, "No access point provided.")
         tests = self.soft_ap_test_params.get(
-            'test_soft_ap_and_client_mode_random_toggle_stress', [
+            "test_soft_ap_and_client_mode_random_toggle_stress",
+            [
                 dict(
-                    test_name=
-                    'test_soft_ap_and_client_mode_random_toggle_stress_default'
+                    test_name="test_soft_ap_and_client_mode_random_toggle_stress_default"
                 )
-            ])
+            ],
+        )
 
         test_settings_list = []
         for config_settings in tests:
             soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
+                config_settings.get("soft_ap_params", {})
+            )
             ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
+                config_settings.get("ap_params", {})
+            )
+            iterations = config_settings.get(
+                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
+            )
             test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_and_client_mode_random_toggle_stress_%s_iterations'
-                    % iterations),
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
+                "test_name": config_settings.get(
+                    "test_name",
+                    "test_soft_ap_and_client_mode_random_toggle_stress_%s_iterations"
+                    % iterations,
+                ),
+                "soft_ap_params": soft_ap_params,
+                "ap_params": ap_params,
+                "iterations": iterations,
             }
             test_settings_list.append(test_settings)
         self.run_generated_testcases(
             self.run_soft_ap_and_client_mode_random_toggle_stress_test,
             test_settings_list,
-            name_func=get_test_name_from_settings)
+            name_func=get_test_name_from_settings,
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
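
Note on the pattern above: each stress test expands entries from soft_ap_test_params into per-test settings dicts and hands them to run_generated_testcases together with a name_func. The following is a minimal, self-contained sketch of that expansion; the helper names and the default iteration count are illustrative assumptions, not antlion APIs.

# Minimal sketch (illustrative, not part of this patch) of the data-driven
# test-generation pattern used by the stress tests above.

DEFAULT_STRESS_TEST_ITERATIONS = 10  # assumed default, for illustration only


def name_from_settings(settings: dict) -> str:
    # A name_func such as get_test_name_from_settings is assumed to simply
    # read the precomputed "test_name" key from each settings dict.
    return settings["test_name"]


def build_settings_list(tests: list[dict]) -> list[dict]:
    settings_list = []
    for config in tests:
        iterations = config.get("iterations", DEFAULT_STRESS_TEST_ITERATIONS)
        settings_list.append(
            {
                "test_name": config.get(
                    "test_name",
                    f"test_soft_ap_toggle_stress_{iterations}_iterations",
                ),
                "soft_ap_params": config.get("soft_ap_params", {}),
                "iterations": iterations,
            }
        )
    return settings_list


# Example: two config entries, the second overriding the iteration count.
for settings in build_settings_list([{}, {"iterations": 50}]):
    print(name_from_settings(settings))
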
diff --git a/src/antlion/tests/wlan/functional/WlanRebootTest.py b/src/antlion/tests/wlan/functional/WlanRebootTest.py
index ed305c7..5c8406b 100644
--- a/src/antlion/tests/wlan/functional/WlanRebootTest.py
+++ b/src/antlion/tests/wlan/functional/WlanRebootTest.py
@@ -17,37 +17,35 @@
 import itertools
 import os
 import time
-
 from multiprocessing import Process
 
-from antlion import asserts
-from antlion import context
-from antlion import utils
-from antlion.controllers import iperf_client
-from antlion.controllers import iperf_server
+from antlion import context, utils
+from antlion.controllers import iperf_client, iperf_server
 from antlion.controllers.access_point import AccessPoint, setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.fuchsia_lib.utils_lib import wait_for_port
+from antlion.net import wait_for_port
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
+
+from mobly import asserts, test_runner
 
 # Constants, for readability
-AP = 'ap'
-DUT = 'dut'
+AP = "ap"
+DUT = "dut"
 DEVICES = [AP, DUT]
 
-SOFT = 'soft'
-HARD = 'hard'
+SOFT = "soft"
+HARD = "hard"
 REBOOT_TYPES = [SOFT, HARD]
 
-BAND_2G = '2g'
-BAND_5G = '5g'
+BAND_2G = "2g"
+BAND_5G = "5g"
 BANDS = [BAND_2G, BAND_5G]
 
-IPV4 = 'ipv4'
-IPV6 = 'ipv6'
+IPV4 = "ipv4"
+IPV6 = "ipv6"
 DUAL_IPV4_IPV6 = {IPV4: True, IPV6: True}
 IPV4_ONLY = {IPV4: True, IPV6: False}
 IPV6_ONLY = {IPV4: False, IPV6: True}
@@ -56,8 +54,9 @@
 INTERRUPTS = [True, False]
 OPEN_ENCRYPTION_STRING = "open"
 SECURITY_MODES = [
-    OPEN_ENCRYPTION_STRING, hostapd_constants.WPA2_STRING,
-    hostapd_constants.WPA3_STRING
+    OPEN_ENCRYPTION_STRING,
+    hostapd_constants.WPA2_STRING,
+    hostapd_constants.WPA3_STRING,
 ]
 
 DEFAULT_IPERF_TIMEOUT = 30
@@ -66,26 +65,22 @@
 DUT_IP_ADDRESS_TIMEOUT = 30  # max time for DAD to complete
 
 # Constants for Custom Reboot Tests
-ALL = 'all'
-BOTH = 'both'
+ALL = "all"
+BOTH = "both"
 
 CUSTOM_TEST_REBOOT_DEVICES = {AP: [AP], DUT: [DUT], ALL: [AP, DUT]}
 CUSTOM_TEST_REBOOT_TYPES = {SOFT: [SOFT], HARD: [HARD], ALL: [SOFT, HARD]}
-CUSTOM_TEST_BANDS = {
-    BAND_2G: [BAND_2G],
-    BAND_5G: [BAND_5G],
-    ALL: [BAND_2G, BAND_5G]
-}
+CUSTOM_TEST_BANDS = {BAND_2G: [BAND_2G], BAND_5G: [BAND_5G], ALL: [BAND_2G, BAND_5G]}
 CUSTOM_TEST_IP_VERSIONS = {
     IPV4: [IPV4_ONLY],
     IPV6: [IPV6_ONLY],
     BOTH: [DUAL_IPV4_IPV6],
-    ALL: [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6]
+    ALL: [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6],
 }
-CUSTOM_TEST_INTERRUPTS = {'true': [True], 'false': [False], ALL: [True, False]}
+CUSTOM_TEST_INTERRUPTS = {"true": [True], "false": [False], ALL: [True, False]}
 
 
-class WlanRebootTest(WifiBaseTest):
+class WlanRebootTest(base_test.WifiBaseTest):
     """Tests wlan reconnects in different reboot scenarios.
 
     Testbed Requirement:
@@ -99,26 +94,28 @@
 
     def setup_generated_tests(self):
         self._read_wlan_reboot_test_params()
-        self.generate_tests(test_logic=self.run_reboot_test,
-                            name_func=self.generate_test_name,
-                            arg_sets=self.generate_test_args())
+        self.generate_tests(
+            test_logic=self.run_reboot_test,
+            name_func=self.generate_test_name,
+            arg_sets=self.generate_test_args(),
+        )
 
     def setup_class(self):
         super().setup_class()
-        self.android_devices = getattr(self, 'android_devices', [])
-        self.fuchsia_devices = getattr(self, 'fuchsia_devices', [])
+        self.android_devices = getattr(self, "android_devices", [])
+        self.fuchsia_devices = getattr(self, "fuchsia_devices", [])
+        self.pdu_devices = getattr(self, "pdu_devices", [])
 
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
         self.access_point: AccessPoint = self.access_points[0]
 
@@ -131,8 +128,8 @@
                 self.iperf_client_on_dut = self.dut.create_iperf_client()
         else:
             self.log.info(
-                'Skipping iperf throughput validation as requested by ACTS '
-                'config')
+                "Skipping iperf throughput validation as requested by ACTS " "config"
+            )
 
     def setup_test(self):
         self.access_point.stop_all_aps()
@@ -141,9 +138,15 @@
             ad.droid.wakeLockAcquireBright()
             ad.droid.wakeUpNow()
         self.dut.disconnect()
+        self.dut.device.configure_wlan()
         self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
 
     def teardown_test(self):
+        # TODO(b/273923552): We take a snapshot here and before rebooting the
+        # DUT for every test because the persistence component does not make the
+        # inspect logs available for 120 seconds. This helps for debugging
+        # issues where we need previous state.
+        self.dut.device.take_bug_report()
         self.download_ap_logs()
         self.access_point.stop_all_aps()
         self.dut.disconnect()
@@ -153,13 +156,9 @@
         self.dut.turn_location_off_and_scan_toggle_off()
         self.dut.reset_wifi()
 
-    def setup_ap(self,
-                 ssid,
-                 band,
-                 ipv4=True,
-                 ipv6=False,
-                 security_mode=None,
-                 password=None):
+    def setup_ap(
+        self, ssid, band, ipv4=True, ipv6=False, security_mode=None, password=None
+    ):
         """Setup ap with basic config.
 
         Args:
@@ -171,30 +170,32 @@
         # TODO(fxb/63719): Add varying AP parameters
         security_profile = None
         if security_mode:
-            security_profile = Security(security_mode=security_mode,
-                                        password=password)
+            security_profile = Security(security_mode=security_mode, password=password)
         if band == BAND_2G:
-            setup_ap(access_point=self.access_point,
-                     profile_name='whirlwind',
-                     channel=11,
-                     ssid=ssid,
-                     security=security_profile,
-                     is_ipv6_enabled=ipv6)
+            setup_ap(
+                access_point=self.access_point,
+                profile_name="whirlwind",
+                channel=11,
+                ssid=ssid,
+                security=security_profile,
+                is_ipv6_enabled=ipv6,
+            )
         elif band == BAND_5G:
-            setup_ap(access_point=self.access_point,
-                     profile_name='whirlwind',
-                     channel=36,
-                     ssid=ssid,
-                     security=security_profile,
-                     is_ipv6_enabled=ipv6)
+            setup_ap(
+                access_point=self.access_point,
+                profile_name="whirlwind",
+                channel=36,
+                ssid=ssid,
+                security=security_profile,
+                is_ipv6_enabled=ipv6,
+            )
 
         if not ipv4:
             self.access_point.stop_dhcp()
 
-        self.log.info('Network (SSID: %s) is up.' % ssid)
+        self.log.info("Network (SSID: %s) is up." % ssid)
 
-    def setup_iperf_server_on_ap(self,
-                                 band) -> iperf_server.IPerfServerOverSsh:
+    def setup_iperf_server_on_ap(self, band) -> iperf_server.IPerfServerOverSsh:
         """Configures iperf server based on the tests band.
 
         Args:
@@ -202,14 +203,16 @@
         """
         if band == BAND_2G:
             return iperf_server.IPerfServerOverSsh(
-                self.user_params['AccessPoint'][0]['ssh_config'],
+                self.access_point.ssh_settings,
                 5201,
-                test_interface=self.access_point.wlan_2g)
+                test_interface=self.access_point.wlan_2g,
+            )
         elif band == BAND_5G:
             return iperf_server.IPerfServerOverSsh(
-                self.user_params['AccessPoint'][0]['ssh_config'],
+                self.access_point.ssh_settings,
                 5201,
-                test_interface=self.access_point.wlan_5g)
+                test_interface=self.access_point.wlan_5g,
+            )
 
     def get_iperf_server_address(self, iperf_server_on_ap, ip_version):
         """Retrieves the ip address of the iperf server.
@@ -222,27 +225,28 @@
             String, the ip address of the iperf_server
         """
         iperf_server_addresses = iperf_server_on_ap.get_interface_ip_addresses(
-            iperf_server_on_ap.test_interface)
+            iperf_server_on_ap.test_interface
+        )
         if ip_version == IPV4:
-            iperf_server_ip_address = (
-                iperf_server_addresses['ipv4_private'][0])
+            iperf_server_ip_address = iperf_server_addresses["ipv4_private"][0]
         elif ip_version == IPV6:
-            if iperf_server_addresses['ipv6_private_local']:
-                iperf_server_ip_address = (
-                    iperf_server_addresses['ipv6_private_local'][0])
+            if iperf_server_addresses["ipv6_private_local"]:
+                iperf_server_ip_address = iperf_server_addresses["ipv6_private_local"][
+                    0
+                ]
             else:
-                iperf_server_ip_address = (
-                    '%s%%%s' % (iperf_server_addresses['ipv6_link_local'][0],
-                                self.iperf_client_on_dut.test_interface))
+                iperf_server_ip_address = "%s%%%s" % (
+                    iperf_server_addresses["ipv6_link_local"][0],
+                    self.iperf_client_on_dut.test_interface,
+                )
         else:
-            raise ValueError('Invalid IP version: %s' % ip_version)
+            raise ValueError("Invalid IP version: %s" % ip_version)
 
         return iperf_server_ip_address
 
-    def verify_traffic_between_dut_and_ap(self,
-                                          iperf_server_on_ap,
-                                          iperf_client_on_dut,
-                                          ip_version=IPV4):
+    def verify_traffic_between_dut_and_ap(
+        self, iperf_server_on_ap, iperf_client_on_dut, ip_version=IPV4
+    ):
         """Runs IPerf traffic from the iperf client (dut) and the iperf
         server (and vice versa) and verifies traffic was able to pass
         successfully.
@@ -258,45 +262,57 @@
                 directions.
         """
         dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-            iperf_client_on_dut.test_interface)
+            iperf_client_on_dut.test_interface
+        )
 
         iperf_server_ip_address = self.get_iperf_server_address(
-            iperf_server_on_ap, ip_version)
+            iperf_server_on_ap, ip_version
+        )
 
         self.log.info(
-            'Attempting to pass traffic from DUT to IPerf server (%s).' %
-            iperf_server_ip_address)
-        tx_file = iperf_client_on_dut.start(iperf_server_ip_address,
-                                            '-i 1 -t 3 -J',
-                                            'reboot_tx',
-                                            timeout=DEFAULT_IPERF_TIMEOUT)
+            "Attempting to pass traffic from DUT to IPerf server (%s)."
+            % iperf_server_ip_address
+        )
+        tx_file = iperf_client_on_dut.start(
+            iperf_server_ip_address,
+            "-i 1 -t 3 -J",
+            "reboot_tx",
+            timeout=DEFAULT_IPERF_TIMEOUT,
+        )
         tx_results = iperf_server.IPerfResult(tx_file)
         if not tx_results.avg_receive_rate or tx_results.avg_receive_rate == 0:
             raise ConnectionError(
-                'Failed to pass IPerf traffic from DUT to server (%s). TX '
-                'Average Receive Rate: %s' %
-                (iperf_server_ip_address, tx_results.avg_receive_rate))
+                "Failed to pass IPerf traffic from DUT to server (%s). TX "
+                "Average Receive Rate: %s"
+                % (iperf_server_ip_address, tx_results.avg_receive_rate)
+            )
         else:
             self.log.info(
-                'Success: Traffic passed from DUT to IPerf server (%s).' %
-                iperf_server_ip_address)
+                "Success: Traffic passed from DUT to IPerf server (%s)."
+                % iperf_server_ip_address
+            )
         self.log.info(
-            'Attempting to pass traffic from IPerf server (%s) to DUT.' %
-            iperf_server_ip_address)
-        rx_file = iperf_client_on_dut.start(iperf_server_ip_address,
-                                            '-i 1 -t 3 -R -J',
-                                            'reboot_rx',
-                                            timeout=DEFAULT_IPERF_TIMEOUT)
+            "Attempting to pass traffic from IPerf server (%s) to DUT."
+            % iperf_server_ip_address
+        )
+        rx_file = iperf_client_on_dut.start(
+            iperf_server_ip_address,
+            "-i 1 -t 3 -R -J",
+            "reboot_rx",
+            timeout=DEFAULT_IPERF_TIMEOUT,
+        )
         rx_results = iperf_server.IPerfResult(rx_file)
         if not rx_results.avg_receive_rate or rx_results.avg_receive_rate == 0:
             raise ConnectionError(
-                'Failed to pass IPerf traffic from server (%s) to DUT. RX '
-                'Average Receive Rate: %s' %
-                (iperf_server_ip_address, rx_results.avg_receive_rate))
+                "Failed to pass IPerf traffic from server (%s) to DUT. RX "
+                "Average Receive Rate: %s"
+                % (iperf_server_ip_address, rx_results.avg_receive_rate)
+            )
         else:
             self.log.info(
-                'Success: Traffic passed from IPerf server (%s) to DUT.' %
-                iperf_server_ip_address)
+                "Success: Traffic passed from IPerf server (%s) to DUT."
+                % iperf_server_ip_address
+            )
 
     def start_dut_ping_process(self, iperf_server_on_ap, ip_version=IPV4):
         """Creates a  process that pings the AP from the DUT.
@@ -308,23 +324,23 @@
             iperf_server_on_ap: IPerfServer object, linked to AP
             ip_version: string, the ip version (ipv4 or ipv6)
         """
-        ap_address = self.get_iperf_server_address(iperf_server_on_ap,
-                                                   ip_version)
+        ap_address = self.get_iperf_server_address(iperf_server_on_ap, ip_version)
         if ap_address:
             self.log.info(
-                'Starting ping process to %s in parallel. Logs from this '
-                'process will be suppressed, since it will be intentionally '
-                'interrupted.' % ap_address)
-            ping_proc = Process(target=self.dut.ping,
-                                args=[ap_address],
-                                kwargs={'count': 15})
+                "Starting ping process to %s in parallel. Logs from this "
+                "process will be suppressed, since it will be intentionally "
+                "interrupted." % ap_address
+            )
+            ping_proc = Process(
+                target=self.dut.ping, args=[ap_address], kwargs={"count": 15}
+            )
             with utils.SuppressLogOutput():
                 ping_proc.start()
             # Allow for a few seconds of pinging before allowing it to be
             # interrupted.
             time.sleep(3)
         else:
-            raise ConnectionError('Failed to retrieve APs iperf address.')
+            raise ConnectionError("Failed to retrieve APs iperf address.")
 
     def prepare_dut_for_reconnection(self):
         """Perform any actions to ready DUT for reconnection.
@@ -347,26 +363,27 @@
             ConnectionError, if DUT is not connected after all timeout.
         """
         self.log.info(
-            'Checking if DUT is connected to %s network. Will retry for %s '
-            'seconds.' % (ssid, self.dut_network_connection_timeout))
+            "Checking if DUT is connected to %s network. Will retry for %s "
+            "seconds." % (ssid, self.dut_network_connection_timeout)
+        )
         timeout = time.time() + self.dut_network_connection_timeout
         while time.time() < timeout:
             try:
                 is_connected = self.dut.is_connected(ssid=ssid)
             except Exception as err:
-                self.log.debug('SL4* call failed. Retrying in 1 second.')
+                self.log.debug("SL4* call failed. Retrying in 1 second.")
                 is_connected = False
             finally:
                 if is_connected:
-                    self.log.info('Success: DUT has connected.')
+                    self.log.info("Success: DUT has connected.")
                     break
                 else:
                     self.log.debug(
-                        'DUT not connected to network %s...retrying in 1 second.'
-                        % ssid)
+                        "DUT not connected to network %s...retrying in 1 second." % ssid
+                    )
                     time.sleep(1)
         else:
-            raise ConnectionError('DUT failed to connect to the network.')
+            raise ConnectionError("DUT failed to connect to the network.")
 
     def write_csv_time_to_reconnect(self, test_name, time_to_reconnect):
         """Writes the time to reconnect to a csv file.
@@ -377,12 +394,11 @@
                 reconnect.
         """
         log_context = context.get_current_context()
-        log_path = os.path.join(log_context.get_base_output_path(),
-                                'WlanRebootTest/')
-        csv_file_name = '%stime_to_reconnect.csv' % log_path
-        self.log.info('Writing to %s' % csv_file_name)
-        with open(csv_file_name, 'a') as csv_file:
-            csv_file.write('%s,%s\n' % (test_name, time_to_reconnect))
+        log_path = os.path.join(log_context.get_base_output_path(), "WlanRebootTest/")
+        csv_file_name = "%stime_to_reconnect.csv" % log_path
+        self.log.info("Writing to %s" % csv_file_name)
+        with open(csv_file_name, "a") as csv_file:
+            csv_file.write("%s,%s\n" % (test_name, time_to_reconnect))
 
     def log_and_continue(self, run, time_to_reconnect=None, error=None):
         """Writes the time to reconnect to the csv file before continuing, used
@@ -396,17 +412,21 @@
         """
         if error:
             self.log.info(
-                'Device failed to reconnect to network %s on run %s. Error: %s'
-                % (self.ssid, run, error))
+                "Device failed to reconnect to network %s on run %s. Error: %s"
+                % (self.ssid, run, error)
+            )
             self.write_csv_time_to_reconnect(
-                '%s_run_%s' % (self.test_name, run), 'FAIL')
+                "%s_run_%s" % (self.test_name, run), "FAIL"
+            )
 
         else:
             self.log.info(
-                'Device successfully reconnected to network %s after %s seconds'
-                ' on run %s.' % (self.ssid, time_to_reconnect, run))
+                "Device successfully reconnected to network %s after %s seconds"
+                " on run %s." % (self.ssid, time_to_reconnect, run)
+            )
             self.write_csv_time_to_reconnect(
-                '%s_run_%s' % (self.test_name, run), time_to_reconnect)
+                "%s_run_%s" % (self.test_name, run), time_to_reconnect
+            )
 
     def run_reboot_test(self, settings):
         """Runs a reboot test based on a given config.
@@ -446,51 +466,53 @@
             ValueError, if reboot_type is not 'soft' or 'hard'
 
         """
-        iterations = settings.get('iterations', 1)
+        iterations = settings.get("iterations", 1)
         passed_count = 0
-        ipv4 = settings.get('ipv4', None)
-        ipv6 = settings.get('ipv6', None)
-        reboot_device = settings['reboot_device']
-        reboot_type = settings['reboot_type']
-        band = settings['band']
-        security_mode = settings.get('security_mode', None)
-        password = settings.get('password', None)
+        ipv4 = settings.get("ipv4", None)
+        ipv6 = settings.get("ipv6", None)
+        reboot_device = settings["reboot_device"]
+        reboot_type = settings["reboot_type"]
+        band = settings["band"]
+        security_mode = settings.get("security_mode", None)
+        password = settings.get("password", None)
         if security_mode:
-            if security_mode.lower() == 'open':
+            if security_mode.lower() == "open":
                 security_mode = None
             elif not password:
-                password = generate_random_password(
-                    security_mode=security_mode)
-        interrupt = settings.get('interrupt', None)
+                password = generate_random_password(security_mode=security_mode)
+        interrupt = settings.get("interrupt", None)
         # Skip hard reboots if no PDU present
         asserts.skip_if(
-            reboot_type == HARD
-            and len(self.user_params.get('PduDevice', [])) < 1,
-            'Hard reboots require a PDU device.')
+            reboot_type == HARD and len(self.pdu_devices) == 0,
+            "Hard reboots require a PDU device.",
+        )
         # Skip DUT reboot w/ interrupt tests, since they are no more helpful
         # and may cause threading issues.
         asserts.skip_if(
             (reboot_device == DUT) and interrupt,
-            'Stream interrupts for DUT reboots are prone to threading issues '
-            'and are not supported.')
+            "Stream interrupts for DUT reboots are prone to threading issues "
+            "and are not supported.",
+        )
 
         # Validate test settings.
         if not ipv4 and not ipv6:
-            raise ValueError('Either ipv4, ipv6, or both must be True.')
+            raise ValueError("Either ipv4, ipv6, or both must be True.")
         if reboot_device != DUT and reboot_device != AP:
-            raise ValueError('Invalid reboot device: %s' % reboot_device)
+            raise ValueError("Invalid reboot device: %s" % reboot_device)
         if reboot_type != SOFT and reboot_type != HARD:
-            raise ValueError('Invalid reboot type: %s' % reboot_type)
+            raise ValueError("Invalid reboot type: %s" % reboot_type)
         if band != BAND_2G and band != BAND_5G:
-            raise ValueError('Invalid band: %s' % band)
+            raise ValueError("Invalid band: %s" % band)
 
         self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password)
         if not self.dut.associate(
-                self.ssid,
-                target_security=hostapd_constants.
-                SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(security_mode),
-                target_pwd=password):
-            raise EnvironmentError('Initial network connection failed.')
+            self.ssid,
+            target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                security_mode
+            ),
+            target_pwd=password,
+        ):
+            raise EnvironmentError("Initial network connection failed.")
 
         if not self.skip_iperf:
             dut_test_interface = self.iperf_client_on_dut.test_interface
@@ -504,28 +526,37 @@
             wait_for_port(self.iperf_server_on_ap.ssh_settings.hostname, 5201)
 
             ip_version = IPV6 if ipv6 else IPV4
-            self.verify_traffic_between_dut_and_ap(self.iperf_server_on_ap,
-                                                   self.iperf_client_on_dut,
-                                                   ip_version=ip_version)
+            self.verify_traffic_between_dut_and_ap(
+                self.iperf_server_on_ap, self.iperf_client_on_dut, ip_version=ip_version
+            )
 
         # Looping reboots for stress testing
         for run in range(iterations):
             run += 1
-            self.log.info('Starting run %s of %s.' % (run, iterations))
+            self.log.info("Starting run %s of %s." % (run, iterations))
 
             # Ping from DUT to AP during AP reboot
             if interrupt:
                 if ipv4:
                     self.start_dut_ping_process(self.iperf_server_on_ap)
                 if ipv6:
-                    self.start_dut_ping_process(self.iperf_server_on_ap,
-                                                ip_version=IPV6)
+                    self.start_dut_ping_process(
+                        self.iperf_server_on_ap, ip_version=IPV6
+                    )
+
+            # TODO(b/273923552): We take a snapshot here and during test
+            # teardown for every test because the persistence component does not
+            # make the inspect logs available for 120 seconds. This helps when
+            # debugging issues where we need previous state.
+            self.dut.device.take_bug_report()
 
             # DUT reboots
             if reboot_device == DUT:
-                if not self.skip_iperf and type(
-                        self.iperf_client_on_dut
-                ) == iperf_client.IPerfClientOverSsh:
+                if (
+                    not self.skip_iperf
+                    and type(self.iperf_client_on_dut)
+                    == iperf_client.IPerfClientOverSsh
+                ):
                     self.iperf_client_on_dut.close_ssh()
                 if reboot_type == SOFT:
                     self.dut.device.reboot()
@@ -535,14 +566,13 @@
             # AP reboots
             elif reboot_device == AP:
                 if reboot_type == SOFT:
-                    self.log.info('Cleanly stopping ap.')
+                    self.log.info("Cleanly stopping ap.")
                     self.access_point.stop_all_aps()
                 elif reboot_type == HARD:
                     if not self.skip_iperf:
                         self.iperf_server_on_ap.close_ssh()
                     self.access_point.hard_power_cycle(self.pdu_devices)
-                self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode,
-                              password)
+                self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password)
 
             self.prepare_dut_for_reconnection()
             uptime = time.time()
@@ -560,12 +590,14 @@
 
                     if ipv4:
                         self.verify_traffic_between_dut_and_ap(
-                            self.iperf_server_on_ap, self.iperf_client_on_dut)
+                            self.iperf_server_on_ap, self.iperf_client_on_dut
+                        )
                     if ipv6:
                         self.verify_traffic_between_dut_and_ap(
                             self.iperf_server_on_ap,
                             self.iperf_client_on_dut,
-                            ip_version=IPV6)
+                            ip_version=IPV6,
+                        )
 
             except ConnectionError as err:
                 self.log_and_continue(run, error=err)
@@ -575,14 +607,15 @@
 
         if passed_count == iterations:
             asserts.explicit_pass(
-                'Test Summary: device successfully reconnected to network %s '
-                '%s/%s times.' % (self.ssid, passed_count, iterations))
+                "Test Summary: device successfully reconnected to network %s "
+                "%s/%s times." % (self.ssid, passed_count, iterations)
+            )
 
         else:
             asserts.fail(
-                'Test Summary: device failed reconnection test. Reconnected to '
-                'network %s %s/%s times.' %
-                (self.ssid, passed_count, iterations))
+                "Test Summary: device failed reconnection test. Reconnected to "
+                "network %s %s/%s times." % (self.ssid, passed_count, iterations)
+            )
 
     def generate_test_name(self, settings):
         """Generates a test case name based on the reboot settings passed.
@@ -593,8 +626,11 @@
         Returns:
             A string that represents a test case name.
         """
-        test_name = "test_{reboot_type}_reboot_{reboot_device}_{band}_{security_mode}".format(
-            **settings)
+        test_name = (
+            "test_{reboot_type}_reboot_{reboot_device}_{band}_{security_mode}".format(
+                **settings
+            )
+        )
 
         if settings.get(IPV4):
             test_name += "_ipv4"
@@ -602,10 +638,10 @@
         if settings.get(IPV6):
             test_name += "_ipv6"
 
-        if settings.get('interrupt'):
+        if settings.get("interrupt"):
             test_name += "_interrupt"
 
-        if settings.get('iterations'):
+        if settings.get("iterations"):
             test_name += f"_with_{settings['iterations']}_iterations"
 
         return test_name
@@ -623,10 +659,16 @@
         else:
             interrupts = INTERRUPTS
 
-        for (reboot_device, reboot_type, band, ip_version, interrupt,
-             security_mode) in itertools.product(DEVICES, REBOOT_TYPES, BANDS,
-                                                 IP_VERSIONS, interrupts,
-                                                 SECURITY_MODES):
+        for (
+            reboot_device,
+            reboot_type,
+            band,
+            ip_version,
+            interrupt,
+            security_mode,
+        ) in itertools.product(
+            DEVICES, REBOOT_TYPES, BANDS, IP_VERSIONS, interrupts, SECURITY_MODES
+        ):
             settings = {
                 "reboot_device": reboot_device,
                 "reboot_type": reboot_type,
@@ -636,7 +678,7 @@
                 "ipv6": ip_version["ipv6"],
                 "interrupt": interrupt,
             }
-            test_args.append((settings, ))
+            test_args.append((settings,))
 
         return test_args
 
@@ -691,27 +733,28 @@
         The third example runs two tests, both hard reboots of the DUT with 5g
         and ipv4 only, one with open security and one with WPA3.
         """
-        if 'test_custom_reboots' not in self.wlan_reboot_test_params:
-            self.log.info('No custom reboots provided in ACTS config.')
+        if "test_custom_reboots" not in self.wlan_reboot_test_params:
+            self.log.info("No custom reboots provided in ACTS config.")
             return []
 
         test_args = []
-        for test in self.wlan_reboot_test_params['test_custom_reboots']:
+        for test in self.wlan_reboot_test_params["test_custom_reboots"]:
             # Ensure required params are present
             try:
-                reboot_device = test['reboot_device'].lower()
-                reboot_type = test['reboot_type'].lower()
-                band = test['band'].lower()
-                ip_version = test['ip_version'].lower()
+                reboot_device = test["reboot_device"].lower()
+                reboot_type = test["reboot_type"].lower()
+                band = test["band"].lower()
+                ip_version = test["ip_version"].lower()
             except KeyError as err:
                 raise AttributeError(
-                    'Must provide reboot_type, reboot_device, ip_version, and '
-                    'band (optionally interrupt and iterations) in custom test '
-                    'config. See test_custom_reboots docstring for details. '
-                    'Err: %s' % err)
-            security_modes = test.get('security_modes', 'open')
-            interrupt = str(test.get('interrupt', False)).lower()
-            iterations = test.get('iterations', 1)
+                    "Must provide reboot_type, reboot_device, ip_version, and "
+                    "band (optionally interrupt and iterations) in custom test "
+                    "config. See test_custom_reboots docstring for details. "
+                    "Err: %s" % err
+                )
+            security_modes = test.get("security_modes", "open")
+            interrupt = str(test.get("interrupt", False)).lower()
+            iterations = test.get("iterations", 1)
 
             if interrupt == "true" and self.skip_iperf:
                 raise AttributeError(
@@ -730,13 +773,24 @@
                     security_modes = [security_modes]
             except KeyError as err:
                 raise AttributeError(
-                    'Invalid custom test parameter provided. Err: %s' % err)
+                    "Invalid custom test parameter provided. Err: %s" % err
+                )
 
-            for (reboot_device, reboot_type, band, ip_version, interrupt,
-                 security_mode) in itertools.product(reboot_devices,
-                                                     reboot_types, bands,
-                                                     ip_versions, interrupts,
-                                                     security_modes):
+            for (
+                reboot_device,
+                reboot_type,
+                band,
+                ip_version,
+                interrupt,
+                security_mode,
+            ) in itertools.product(
+                reboot_devices,
+                reboot_types,
+                bands,
+                ip_versions,
+                interrupts,
+                security_modes,
+            ):
                 settings = {
                     "reboot_device": reboot_device,
                     "reboot_type": reboot_type,
@@ -748,16 +802,23 @@
                     "iterations": iterations,
                 }
 
-                test_args.append((settings, ))
+                test_args.append((settings,))
         return test_args
 
     def _read_wlan_reboot_test_params(self):
         self.wlan_reboot_test_params = self.user_params.get(
-            'wlan_reboot_test_params', {})
-        self.skip_iperf = self.wlan_reboot_test_params.get('skip_iperf', False)
+            "wlan_reboot_test_params", {}
+        )
+        self.skip_iperf = self.wlan_reboot_test_params.get("skip_iperf", False)
         # Times (in seconds) to wait for DUT network connection and assigning an
         # ip address to the wlan interface.
         self.dut_network_connection_timeout = self.wlan_reboot_test_params.get(
-            'dut_network_connection_timeout', DUT_NETWORK_CONNECTION_TIMEOUT)
+            "dut_network_connection_timeout", DUT_NETWORK_CONNECTION_TIMEOUT
+        )
         self.dut_ip_address_timeout = self.wlan_reboot_test_params.get(
-            'dut_ip_address_timeout', DUT_IP_ADDRESS_TIMEOUT)
+            "dut_ip_address_timeout", DUT_IP_ADDRESS_TIMEOUT
+        )
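+
+    # A sketch of the optional ACTS user_params consumed above. The key names
+    # come from _read_wlan_reboot_test_params; the values are illustrative
+    # assumptions (the real defaults are the module-level timeout constants):
+    #
+    #   "wlan_reboot_test_params": {
+    #       "skip_iperf": false,
+    #       "dut_network_connection_timeout": 60,
+    #       "dut_ip_address_timeout": 15
+    #   }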
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanScanTest.py b/src/antlion/tests/wlan/functional/WlanScanTest.py
index ed5d08a..ba6961a 100644
--- a/src/antlion/tests/wlan/functional/WlanScanTest.py
+++ b/src/antlion/tests/wlan/functional/WlanScanTest.py
@@ -21,15 +21,16 @@
 
 from datetime import datetime
 
-from antlion import signals
 from antlion.controllers.ap_lib import hostapd_ap_preset
 from antlion.controllers.ap_lib import hostapd_bss_settings
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
+
+from mobly import signals, test_runner
 
 
-class WlanScanTest(WifiBaseTest):
+class WlanScanTest(base_test.WifiBaseTest):
     """WLAN scan test class.
 
     Test Bed Requirement:
@@ -44,8 +45,8 @@
         self.access_point = self.access_points[0]
         self.start_access_point = False
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='drivers')
-        if "AccessPoint" in self.user_params:
+            fd.configure_wlan(association_mechanism="drivers")
+        if self.access_point:
             # This section sets up the config that could be sent to the AP if
             # the AP is needed. The reasoning is since ACTS already connects
             # to the AP if it is in the config, generating the config in memory
@@ -61,69 +62,73 @@
             bss_settings_2g = []
             bss_settings_5g = []
             open_network = self.get_open_network(False, [])
-            self.open_network_2g = open_network['2g']
-            self.open_network_5g = open_network['5g']
+            self.open_network_2g = open_network["2g"]
+            self.open_network_5g = open_network["5g"]
             wpa2_settings = self.get_psk_network(False, [])
-            self.wpa2_network_2g = wpa2_settings['2g']
-            self.wpa2_network_5g = wpa2_settings['5g']
+            self.wpa2_network_2g = wpa2_settings["2g"]
+            self.wpa2_network_5g = wpa2_settings["5g"]
             bss_settings_2g.append(
                 hostapd_bss_settings.BssSettings(
-                    name=self.wpa2_network_2g['SSID'],
-                    ssid=self.wpa2_network_2g['SSID'],
+                    name=self.wpa2_network_2g["SSID"],
+                    ssid=self.wpa2_network_2g["SSID"],
                     security=hostapd_security.Security(
                         security_mode=self.wpa2_network_2g["security"],
-                        password=self.wpa2_network_2g["password"])))
+                        password=self.wpa2_network_2g["password"],
+                    ),
+                )
+            )
             bss_settings_5g.append(
                 hostapd_bss_settings.BssSettings(
-                    name=self.wpa2_network_5g['SSID'],
-                    ssid=self.wpa2_network_5g['SSID'],
+                    name=self.wpa2_network_5g["SSID"],
+                    ssid=self.wpa2_network_5g["SSID"],
                     security=hostapd_security.Security(
                         security_mode=self.wpa2_network_5g["security"],
-                        password=self.wpa2_network_5g["password"])))
+                        password=self.wpa2_network_5g["password"],
+                    ),
+                )
+            )
             self.ap_2g = hostapd_ap_preset.create_ap_preset(
                 iface_wlan_2g=self.access_point.wlan_2g,
                 iface_wlan_5g=self.access_point.wlan_5g,
                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                ssid=self.open_network_2g['SSID'],
-                bss_settings=bss_settings_2g)
+                ssid=self.open_network_2g["SSID"],
+                bss_settings=bss_settings_2g,
+            )
             self.ap_5g = hostapd_ap_preset.create_ap_preset(
                 iface_wlan_2g=self.access_point.wlan_2g,
                 iface_wlan_5g=self.access_point.wlan_5g,
                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                ssid=self.open_network_5g['SSID'],
-                bss_settings=bss_settings_5g)
+                ssid=self.open_network_5g["SSID"],
+                bss_settings=bss_settings_5g,
+            )
 
         if "wlan_open_network_2g" in self.user_params:
             self.open_network_2g = self.user_params.get("wlan_open_network_2g")
-        elif "AccessPoint" in self.user_params:
+        elif self.access_point:
             self.start_access_point_2g = True
         else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_open_network_2g)')
+            raise Exception("Missing parameter in config " "(wlan_open_network_2g)")
 
         if "wlan_open_network_5g" in self.user_params:
             self.open_network_5g = self.user_params.get("wlan_open_network_5g")
-        elif "AccessPoint" in self.user_params:
+        elif self.access_point:
             self.start_access_point_5g = True
         else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_open_network_5g)')
+            raise Exception("Missing parameter in config " "(wlan_open_network_5g)")
 
         if "wlan_wpa2_network_2g" in self.user_params:
             self.wpa2_network_2g = self.user_params.get("wlan_wpa2_network_2g")
-        elif "AccessPoint" in self.user_params:
+        elif self.access_point:
             self.start_access_point_2g = True
         else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_wpa2_network_2g)')
+            raise Exception("Missing parameter in config " "(wlan_wpa2_network_2g)")
 
         if "wlan_wpa2_network_5g" in self.user_params:
             self.wpa2_network_5g = self.user_params.get("wlan_wpa2_network_5g")
-        elif "AccessPoint" in self.user_params:
+        elif self.access_point:
             self.start_access_point_5g = True
         else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_wpa2_network_5g)')
+            raise Exception("Missing parameter in config " "(wlan_wpa2_network_5g)")
 
         # Only bring up the APs that are needed for the test.  Each ssid is
         # randomly generated so there is no chance of re associating to a
@@ -152,15 +157,15 @@
     def on_fail(self, test_name, begin_time):
         for fd in self.fuchsia_devices:
             super().on_device_fail(fd, test_name, begin_time)
-            fd.configure_wlan(association_mechanism='drivers')
+            fd.configure_wlan(association_mechanism="drivers")
 
     """Helper Functions"""
 
     def check_connect_response(self, connection_response):
-        """ Checks the result of connecting to a wlan.
-            Args:
-                connection_response: The response from SL4F after attempting
-                    to connect to a wlan.
+        """Checks the result of connecting to a wlan.
+        Args:
+            connection_response: The response from SL4F after attempting
+                to connect to a wlan.
         """
         if connection_response.get("error") is None:
             # the command did not get an error response - go ahead and
@@ -174,35 +179,35 @@
                 raise signals.TestFailure("Connect call failed, aborting test")
         else:
             # the response indicates an error - log and raise failure
-            raise signals.TestFailure("Aborting test - Connect call failed "
-                                      "with error: %s" %
-                                      connection_response.get("error"))
+            raise signals.TestFailure(
+                "Aborting test - Connect call failed "
+                "with error: %s" % connection_response.get("error")
+            )
 
     def scan_while_connected(self, wlan_network_params, fd):
-        """ Connects to as specified network and initiates a scan
-                Args:
-                    wlan_network_params: A dictionary containing wlan
-                        infomation.
-                    fd: The fuchsia device to connect to the wlan.
+        """Connects to as specified network and initiates a scan
+        Args:
+            wlan_network_params: A dictionary containing wlan
+                information.
+            fd: The fuchsia device to connect to the wlan.
         """
-        target_ssid = wlan_network_params['SSID']
+        target_ssid = wlan_network_params["SSID"]
         self.log.info("got the ssid! %s", target_ssid)
         target_pwd = None
-        if 'password' in wlan_network_params:
-            target_pwd = wlan_network_params['password']
+        if "password" in wlan_network_params:
+            target_pwd = wlan_network_params["password"]
 
-        bss_scan_response = fd.sl4f.wlan_lib.wlanScanForBSSInfo().get('result')
+        bss_scan_response = fd.sl4f.wlan_lib.wlanScanForBSSInfo().get("result")
         connection_response = fd.sl4f.wlan_lib.wlanConnectToNetwork(
-            target_ssid,
-            bss_scan_response[target_ssid][0],
-            target_pwd=target_pwd)
+            target_ssid, bss_scan_response[target_ssid][0], target_pwd=target_pwd
+        )
         self.check_connect_response(connection_response)
         self.basic_scan_request(fd)
 
     def basic_scan_request(self, fd):
-        """ Initiates a basic scan on a Fuchsia device
-            Args:
-                fd: A fuchsia device
+        """Initiates a basic scan on a Fuchsia device
+        Args:
+            fd: A fuchsia device
         """
         start_time = datetime.now()
 
@@ -215,8 +220,10 @@
             scan_results = scan_response["result"]
         else:
             # the response indicates an error - log and raise failure
-            raise signals.TestFailure("Aborting test - scan failed with "
-                                      "error: %s" % scan_response.get("error"))
+            raise signals.TestFailure(
+                "Aborting test - scan failed with "
+                "error: %s" % scan_response.get("error")
+            )
 
         self.log.info("scan contained %d results", len(scan_results))
 
@@ -224,11 +231,11 @@
         self.log.info("scan time: %d ms", total_time_ms)
 
         if len(scan_results) > 0:
-            raise signals.TestPass(details="",
-                                   extras={"Scan time": "%d" % total_time_ms})
+            raise signals.TestPass(
+                details="", extras={"Scan time": "%d" % total_time_ms}
+            )
         else:
-            raise signals.TestFailure("Scan failed or did not "
-                                      "find any networks")
+            raise signals.TestFailure("Scan failed or did not " "find any networks")
 
     """Tests"""
 
@@ -252,3 +259,7 @@
     def test_scan_while_connected_wpa2_network_5g(self):
         for fd in self.fuchsia_devices:
             self.scan_while_connected(self.wpa2_network_5g, fd)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py b/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
index 631df3b..6cfdc0a 100644
--- a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
+++ b/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
@@ -14,18 +14,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import asserts
 from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import asserts, test_runner
+
 
 # TODO(fxb/68956): Add security protocol check to mixed mode tests when info is
 # available.
-class WlanTargetSecurityTest(WifiBaseTest):
+class WlanTargetSecurityTest(base_test.WifiBaseTest):
     """Tests Fuchsia's target security concept and security upgrading
 
     Testbed Requirements:
@@ -34,15 +35,14 @@
     """
 
     def setup_class(self):
-        if 'dut' in self.user_params and self.user_params[
-                'dut'] != 'fuchsia_devices':
+        if "dut" in self.user_params and self.user_params["dut"] != "fuchsia_devices":
             raise AttributeError(
-                'WlanTargetSecurityTest is only relevant for Fuchsia devices.')
+                "WlanTargetSecurityTest is only relevant for Fuchsia devices."
+            )
 
         self.dut = create_wlan_device(self.fuchsia_devices[0])
-        if self.dut.device.association_mechanism != 'policy':
-            raise AttributeError(
-                'Must use WLAN policy layer to test target security.')
+        if self.dut.device.association_mechanism != "policy":
+            raise AttributeError("Must use WLAN policy layer to test target security.")
 
         self.access_point = self.access_points[0]
 
@@ -65,7 +65,7 @@
         self.access_point.stop_all_aps()
 
     def setup_ap(self, security_mode=None):
-        """ Sets up an AP using the provided security mode.
+        """Sets up an AP using the provided security mode.
 
         Args:
             security_mode: string, security mode for AP
@@ -80,278 +80,311 @@
         security_profile = None
 
         if security_mode:
-            security_profile = Security(security_mode=security_mode,
-                                        password=password)
+            security_profile = Security(security_mode=security_mode, password=password)
 
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            security=security_profile,
+        )
 
         return (ssid, password)
 
     # Open Security on AP
     def test_associate_open_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap()
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
 
     def test_reject_open_ap_with_wep_target_security(self):
         ssid, password = self.setup_ap()
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_reject_open_ap_with_wpa_target_security(self):
         ssid, password = self.setup_ap()
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_reject_open_ap_with_wpa2_target_security(self):
         ssid, password = self.setup_ap()
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_reject_open_ap_with_wpa3_target_security(self):
         ssid, password = self.setup_ap()
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     # WEP Security on AP
     def test_reject_wep_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
+        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
 
     def test_associate_wep_ap_with_wep_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_reject_wep_ap_with_wpa_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_reject_wep_ap_with_wpa2_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_reject_wep_ap_with_wpa3_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     # WPA Security on AP
     def test_reject_wpa_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
+        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
 
     def test_reject_wpa_ap_with_wep_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_associate_wpa_ap_with_wpa_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_reject_wpa_ap_with_wpa2_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_reject_wpa_ap_with_wpa3_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     # WPA2 Security on AP
     def test_reject_wpa2_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
+        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
 
     def test_reject_wpa2_ap_with_wep_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_associate_wpa2_ap_with_wpa_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_associate_wpa2_ap_with_wpa2_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_reject_wpa2_ap_with_wpa3_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     # WPA/WPA2 Security on AP
     def test_reject_wpa_wpa2_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
+        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
 
     def test_reject_wpa_wpa2_ap_with_wep_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_associate_wpa_wpa2_ap_with_wpa_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_associate_wpa_wpa2_ap_with_wpa2_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_reject_wpa_wpa2_ap_with_wpa3_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     # WPA3 Security on AP
     def test_reject_wpa3_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
+        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
 
     def test_reject_wpa3_ap_with_wep_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_associate_wpa3_ap_with_wpa_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Expected failure to associate. WPA credentials for WPA3 was '
-            'temporarily disabled, see https://fxbug.dev/85817 for context. '
-            'If this feature was reenabled, please update this test\'s '
-            'expectation.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Expected failure to associate. WPA credentials for WPA3 was "
+            "temporarily disabled, see https://fxbug.dev/85817 for context. "
+            "If this feature was reenabled, please update this test's "
+            "expectation.",
+        )
 
     def test_associate_wpa3_ap_with_wpa2_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_associate_wpa3_ap_with_wpa3_target_security(self):
         ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     # WPA2/WPA3 Security on AP
     def test_reject_wpa2_wpa3_ap_with_open_target_security(self):
         ssid, _ = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
+        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
 
     def test_reject_wpa2_wpa3_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
+        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
+            ),
+            "Should not have associated.",
+        )
 
     def test_associate_wpa2_wpa3_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
+        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
         asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Expected failure to associate. WPA credentials for WPA3 was '
-            'temporarily disabled, see https://fxbug.dev/85817 for context. '
-            'If this feature was reenabled, please update this test\'s '
-            'expectation.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
+            ),
+            "Expected failure to associate. WPA credentials for WPA3 was "
+            "temporarily disabled, see https://fxbug.dev/85817 for context. "
+            "If this feature was reenabled, please update this test's "
+            "expectation.",
+        )
 
     def test_associate_wpa2_wpa3_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
+        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
 
     def test_associate_wpa2_wpa3_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
+        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
         asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password), 'Failed to associate.')
+            self.dut.associate(
+                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
+            ),
+            "Failed to associate.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py b/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
index ffd41e2..4b0e9b0 100644
--- a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
+++ b/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
@@ -17,25 +17,30 @@
 import time
 
 from datetime import datetime, timedelta, timezone
-from typing import FrozenSet
+from typing import FrozenSet, Optional
 
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion import asserts
-from antlion import signals
 from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.radio_measurement import BssidInformation, BssidInformationCapabilities, NeighborReportElement, PhyType
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.radio_measurement import (
+    BssidInformation,
+    BssidInformationCapabilities,
+    NeighborReportElement,
+    PhyType,
+)
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
+from antlion.test_utils.wifi import base_test
+
+from mobly import asserts, signals, test_runner
 
 
-# TODO(fxbug.dev/103440) WNM support should be visible/controllable in ACTS.
-# When ACTS can see WNM features that are enabled (through ACTS config) or
-# ACTS can enable WNM features (through new APIs), additional tests should be
-# added to this suite that check that features function properly when the DUT is
-# configured to support those features.
-class WlanWirelessNetworkManagementTest(WifiBaseTest):
+# Antlion can see (via the wlan_features config directive) whether WNM features
+# are enabled, and runs or skips tests depending on which WNM features are
+# present.
+class WlanWirelessNetworkManagementTest(base_test.WifiBaseTest):
     """Tests Fuchsia's Wireless Network Management (AKA 802.11v) support.
 
     Testbed Requirements:
@@ -47,15 +52,14 @@
     """
 
     def setup_class(self):
-        if 'dut' in self.user_params and self.user_params[
-                'dut'] != 'fuchsia_devices':
+        if "dut" in self.user_params and self.user_params["dut"] != "fuchsia_devices":
             raise AttributeError(
-                'WlanWirelessNetworkManagementTest is only relevant for Fuchsia devices.'
+                "WlanWirelessNetworkManagementTest is only relevant for Fuchsia devices."
             )
 
         self.dut = create_wlan_device(self.fuchsia_devices[0])
-        if self.dut.device.association_mechanism != 'policy':
-            raise AttributeError('Must use WLAN policy layer to test WNM.')
+        if self.dut.device.association_mechanism != "policy":
+            raise AttributeError("Must use WLAN policy layer to test WNM.")
         self.access_point = self.access_points[0]
 
     def teardown_class(self):
@@ -79,23 +83,33 @@
     def setup_ap(
         self,
         ssid: str,
+        security_mode: Optional[str] = None,
+        additional_ap_parameters: Optional[dict] = None,
         channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset()):
+        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    ):
         """Sets up an AP using the provided parameters.
 
         Args:
             ssid: SSID for the AP.
+            security_mode: expressed as a string (e.g., WPA2; default is None,
+                which indicates open security).
+            additional_ap_parameters: A dictionary of parameters that can be sent
+                directly into the hostapd config file.
             channel: which channel number to set the AP to (default is
                 AP_DEFAULT_CHANNEL_2G).
             wnm_features: Wireless Network Management features to enable
                 (default is no WNM features).
         """
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=channel,
-                 ssid=ssid,
-                 security=None,
-                 wnm_features=wnm_features)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=channel,
+            ssid=ssid,
+            security=Security(security_mode),
+            additional_ap_parameters=additional_ap_parameters,
+            wnm_features=wnm_features,
+        )
 
     def _get_client_mac(self) -> str:
         """Get the MAC address of the DUT client interface.
@@ -107,107 +121,301 @@
             ConnectionError if the DUT interface query fails.
         """
         wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if wlan_ifaces.get('error'):
-            raise ConnectionError('Failed to get wlan interface IDs: %s' %
-                                  wlan_ifaces['error'])
+        if wlan_ifaces.get("error"):
+            raise ConnectionError(
+                "Failed to get wlan interface IDs: %s" % wlan_ifaces["error"]
+            )
 
-        for wlan_iface in wlan_ifaces['result']:
-            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(
-                wlan_iface)
-            if iface_info.get('error'):
-                raise ConnectionError('Failed to query wlan iface: %s' %
-                                      iface_info['error'])
+        for wlan_iface in wlan_ifaces["result"]:
+            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(wlan_iface)
+            if iface_info.get("error"):
+                raise ConnectionError(
+                    "Failed to query wlan iface: %s" % iface_info["error"]
+                )
 
-            if iface_info['result']['role'] == 'Client':
-                return utils.mac_address_list_to_str(
-                    iface_info['result']['sta_addr'])
+            if iface_info["result"]["role"] == "Client":
+                return utils.mac_address_list_to_str(iface_info["result"]["sta_addr"])
         raise ValueError(
-            'Failed to get client interface mac address. No client interface found.'
+            "Failed to get client interface mac address. No client interface found."
         )
 
-    def test_bss_transition_ap_supported_dut_unsupported(self):
+    def test_bss_transition_is_not_advertised_when_ap_supported_dut_unsupported(self):
+        if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is present")
+
         ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT])
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
         self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
         client_mac = self._get_client_mac()
 
         ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac)
+            self.access_point.wlan_2g, client_mac
+        )
         asserts.assert_false(
             ext_capabilities.bss_transition,
-            'DUT is incorrectly advertising BSS Transition Management support')
+            "DUT is incorrectly advertising BSS Transition Management support",
+        )
 
-    def test_wnm_sleep_mode_ap_supported_dut_unsupported(self):
+    def test_bss_transition_is_advertised_when_ap_supported_dut_supported(self):
+        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is not present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        self.setup_ap(ssid, wnm_features=wnm_features)
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
+        client_mac = self._get_client_mac()
+
+        ext_capabilities = self.access_point.get_sta_extended_capabilities(
+            self.access_point.wlan_2g, client_mac
+        )
+        asserts.assert_true(
+            ext_capabilities.bss_transition,
+            "DUT is not advertising BSS Transition Management support",
+        )
+
+    def test_wnm_sleep_mode_is_not_advertised_when_ap_supported_dut_unsupported(self):
         ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         wnm_features = frozenset([hostapd_constants.WnmFeature.WNM_SLEEP_MODE])
         self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
         client_mac = self._get_client_mac()
 
         ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac)
+            self.access_point.wlan_2g, client_mac
+        )
         asserts.assert_false(
             ext_capabilities.wnm_sleep_mode,
-            'DUT is incorrectly advertising WNM Sleep Mode support')
+            "DUT is incorrectly advertising WNM Sleep Mode support",
+        )
 
-    def test_btm_req_ignored_dut_unsupported(self):
+    def test_roam_on_btm_req(self):
+        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is not present")
+
         ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT])
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
         # Setup 2.4 GHz AP.
-        self.setup_ap(ssid,
-                      channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                      wnm_features=wnm_features)
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            wnm_features=wnm_features,
+        )
 
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
         # Verify that DUT is actually associated (as seen from AP).
         client_mac = self._get_client_mac()
         asserts.assert_true(
-            client_mac
-            in self.access_point.get_stas(self.access_point.wlan_2g),
-            'Client MAC not included in list of associated STAs on the 2.4GHz band'
+            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
+            "Client MAC not included in list of associated STAs on the 2.4GHz band",
         )
 
         # Setup 5 GHz AP with same SSID.
-        self.setup_ap(ssid,
-                      channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                      wnm_features=wnm_features)
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            wnm_features=wnm_features,
+        )
 
         # Construct a BTM request.
         dest_bssid = self.access_point.get_bssid_from_ssid(
-            ssid, self.access_point.wlan_5g)
+            ssid, self.access_point.wlan_5g
+        )
         dest_bssid_info = BssidInformation(
-            security=True, capabilities=BssidInformationCapabilities())
+            security=True, capabilities=BssidInformationCapabilities()
+        )
         neighbor_5g_ap = NeighborReportElement(
             dest_bssid,
             dest_bssid_info,
             operating_class=126,
             channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            phy_type=PhyType.VHT)
+            phy_type=PhyType.VHT,
+        )
         btm_req = BssTransitionManagementRequest(
-            disassociation_imminent=True, candidate_list=[neighbor_5g_ap])
+            preferred_candidate_list_included=True,
+            disassociation_imminent=True,
+            candidate_list=[neighbor_5g_ap],
+        )
+
+        # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
+        # TODO(fxbug.dev/117517) Remove when fixed, or when non-firmware BTM support is merged.
+        time.sleep(5)
 
         # Send BTM request from 2.4 GHz AP to DUT
         self.access_point.send_bss_transition_management_req(
-            self.access_point.wlan_2g, client_mac, btm_req)
+            self.access_point.wlan_2g, client_mac, btm_req
+        )
+
+        # Check that DUT has reassociated.
+        REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
+        while datetime.now(timezone.utc) < REASSOC_DEADLINE:
+            if client_mac in self.access_point.get_stas(self.access_point.wlan_5g):
+                break
+            else:
+                time.sleep(0.25)
+
+        # Verify that DUT roamed (as seen from AP).
+        asserts.assert_true(
+            client_mac in self.access_point.get_stas(self.access_point.wlan_5g),
+            "Client MAC not included in list of associated STAs on the 5GHz band",
+        )
+
+    def test_btm_req_ignored_dut_unsupported(self):
+        if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        # Setup 2.4 GHz AP.
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            wnm_features=wnm_features,
+        )
+
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        # Verify that DUT is actually associated (as seen from AP).
+        client_mac = self._get_client_mac()
+        asserts.assert_true(
+            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
+            "Client MAC not included in list of associated STAs on the 2.4GHz band",
+        )
+
+        # Setup 5 GHz AP with same SSID.
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            wnm_features=wnm_features,
+        )
+
+        # Construct a BTM request.
+        dest_bssid = self.access_point.get_bssid_from_ssid(
+            ssid, self.access_point.wlan_5g
+        )
+        dest_bssid_info = BssidInformation(
+            security=True, capabilities=BssidInformationCapabilities()
+        )
+        neighbor_5g_ap = NeighborReportElement(
+            dest_bssid,
+            dest_bssid_info,
+            operating_class=126,
+            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            phy_type=PhyType.VHT,
+        )
+        btm_req = BssTransitionManagementRequest(
+            disassociation_imminent=True, candidate_list=[neighbor_5g_ap]
+        )
+
+        # Send BTM request from 2.4 GHz AP to DUT
+        self.access_point.send_bss_transition_management_req(
+            self.access_point.wlan_2g, client_mac, btm_req
+        )
 
         # Check that DUT has not reassociated.
         REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
         while datetime.now(timezone.utc) < REASSOC_DEADLINE:
             # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
-            if client_mac in self.access_point.get_stas(
-                    self.access_point.wlan_5g):
+            if client_mac in self.access_point.get_stas(self.access_point.wlan_5g):
                 raise signals.TestFailure(
-                    'DUT unexpectedly roamed to target BSS after BTM request')
+                    "DUT unexpectedly roamed to target BSS after BTM request"
+                )
             else:
                 time.sleep(0.25)
 
         # DUT should have stayed associated to original AP.
         asserts.assert_true(
-            client_mac
-            in self.access_point.get_stas(self.access_point.wlan_2g),
-            'DUT lost association on the 2.4GHz band after BTM request')
+            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
+            "DUT lost association on the 2.4GHz band after BTM request",
+        )
+
+    def test_btm_req_target_ap_rejects_reassoc(self):
+        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is not present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        # Setup 2.4 GHz AP.
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            wnm_features=wnm_features,
+        )
+
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        # Verify that DUT is actually associated (as seen from AP).
+        client_mac = self._get_client_mac()
+        asserts.assert_true(
+            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
+            "Client MAC not included in list of associated STAs on the 2.4GHz band",
+        )
+
+        # Setup 5 GHz AP with same SSID, but reject all STAs.
+        reject_all_sta_param = {"max_num_sta": 0}
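+        # hostapd rejects every new association attempt when max_num_sta is 0.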
+        self.setup_ap(
+            ssid,
+            additional_ap_parameters=reject_all_sta_param,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            wnm_features=wnm_features,
+        )
+
+        # Construct a BTM request.
+        dest_bssid = self.access_point.get_bssid_from_ssid(
+            ssid, self.access_point.wlan_5g
+        )
+        dest_bssid_info = BssidInformation(
+            security=True, capabilities=BssidInformationCapabilities()
+        )
+        neighbor_5g_ap = NeighborReportElement(
+            dest_bssid,
+            dest_bssid_info,
+            operating_class=126,
+            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            phy_type=PhyType.VHT,
+        )
+        btm_req = BssTransitionManagementRequest(
+            disassociation_imminent=True, candidate_list=[neighbor_5g_ap]
+        )
+
+        # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
+        # TODO(fxbug.dev/117517) Remove when fixed, or when non-firmware BTM support is merged.
+        time.sleep(5)
+
+        # Send BTM request from 2.4 GHz AP to DUT
+        self.access_point.send_bss_transition_management_req(
+            self.access_point.wlan_2g, client_mac, btm_req
+        )
+
+        # Check that DUT has not reassociated.
+        REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
+        while datetime.now(timezone.utc) < REASSOC_DEADLINE:
+            # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
+            if client_mac in self.access_point.get_stas(self.access_point.wlan_5g):
+                raise signals.TestFailure(
+                    "DUT unexpectedly roamed to target BSS after BTM request"
+                )
+            else:
+                time.sleep(0.25)
+
+        # DUT should have stayed associated to original AP.
+        asserts.assert_true(
+            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
+            "DUT lost association on the 2.4GHz band after BTM request",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml b/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml
new file mode 100644
index 0000000..c559bbc
--- /dev/null
+++ b/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml
@@ -0,0 +1,2 @@
+wlan_reboot_test_params:
+  skip_iperf: false
diff --git a/src/antlion/tests/wlan/misc/BUILD.gn b/src/antlion/tests/wlan/misc/BUILD.gn
new file mode 100644
index 0000000..a0250c8
--- /dev/null
+++ b/src/antlion/tests/wlan/misc/BUILD.gn
@@ -0,0 +1,26 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("wlan_interface_test") {
+  main_source = "WlanInterfaceTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("wlan_misc_scenario") {
+  main_source = "WlanMiscScenarioTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":wlan_interface_test($host_toolchain)",
+    ":wlan_misc_scenario($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py b/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
index 352f704..0614901 100644
--- a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
+++ b/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
@@ -14,28 +14,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import signals
-
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import signals, test_runner
 
-class WlanInterfaceTest(WifiBaseTest):
 
+class WlanInterfaceTest(base_test.WifiBaseTest):
     def setup_class(self):
         super().setup_class()
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif dut == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
             self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
+            self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
     def test_destroy_iface(self):
         """Test that we don't error out when destroying the WLAN interface.
@@ -60,3 +58,7 @@
         if not self.dut.destroy_wlan_interface(wlan_interfaces[0]):
             raise signals.TestFailure("Failed to destroy WLAN interface")
         raise signals.TestPass("Success")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py b/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py
index ee4efa1..b5d17c5 100644
--- a/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py
+++ b/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py
@@ -14,17 +14,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import asserts
 from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
 from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import asserts, test_runner
 
-class WlanMiscScenarioTest(WifiBaseTest):
+
+class WlanMiscScenarioTest(base_test.WifiBaseTest):
     """Random scenario tests, usually to reproduce certain bugs, that do not
     fit into a specific test category, but should still be run in CI to catch
     regressions.
@@ -32,18 +33,18 @@
 
     def setup_class(self):
         super().setup_class()
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif dut == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
             self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
+            self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
         self.access_point = self.access_points[0]
 
     def teardown_class(self):
@@ -72,36 +73,46 @@
         """
         # Setup a WPA3 network
         wpa3_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=wpa3_ssid,
-                 security=Security(security_mode='wpa3',
-                                   password=generate_random_password('wpa3')))
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=wpa3_ssid,
+            security=Security(
+                security_mode="wpa3", password=generate_random_password("wpa3")
+            ),
+        )
         # Attempt to associate with wrong password, expecting failure
-        self.log.info('Attempting to associate WPA3 with wrong password.')
+        self.log.info("Attempting to associate WPA3 with wrong password.")
         asserts.assert_false(
-            self.dut.associate(wpa3_ssid,
-                               target_pwd='wrongpass',
-                               target_security='wpa3'),
-            'Associated with WPA3 network using the wrong password')
+            self.dut.associate(
+                wpa3_ssid, target_pwd="wrongpass", target_security="wpa3"
+            ),
+            "Associated with WPA3 network using the wrong password",
+        )
 
         self.access_point.stop_all_aps()
 
         # Setup a WPA2 Network
         wpa2_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
-        wpa2_password = generate_random_password('wpa2')
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=wpa2_ssid,
-                 security=Security(security_mode='wpa2',
-                                   password=wpa2_password))
+        wpa2_password = generate_random_password("wpa2")
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=wpa2_ssid,
+            security=Security(security_mode="wpa2", password=wpa2_password),
+        )
 
         # Attempt to associate, expecting success
-        self.log.info('Attempting to associate with WPA2 network.')
+        self.log.info("Attempting to associate with WPA2 network.")
         asserts.assert_true(
-            self.dut.associate(wpa2_ssid,
-                               target_pwd=wpa2_password,
-                               target_security='wpa2'),
-            'Failed to associate with WPA2 network after a WPA3 rejection.')
+            self.dut.associate(
+                wpa2_ssid, target_pwd=wpa2_password, target_security="wpa2"
+            ),
+            "Failed to associate with WPA2 network after a WPA3 rejection.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/BUILD.gn b/src/antlion/tests/wlan/performance/BUILD.gn
new file mode 100644
index 0000000..0fb75d9
--- /dev/null
+++ b/src/antlion/tests/wlan/performance/BUILD.gn
@@ -0,0 +1,46 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("channel_sweep_test") {
+  main_source = "ChannelSweepTest.py"
+  test_params = "channel_sweep_test_params.yaml"
+  environments = display_ap_iperf_envs
+}
+
+antlion_host_test("wlan_rvr_test") {
+  main_source = "WlanRvrTest.py"
+  test_params = "rvr_settings.yaml"
+  environments = display_ap_iperf_attenuator_envs
+}
+
+antlion_host_test("wlan_wmm_test") {
+  main_source = "WmmTest.py"
+
+  # Requires a second station and custom configuration. There are no available
+  # testbeds to support this topology. This will remain an at-desk test until an
+  # infra-hosted testbed matching this topology is supported.
+  environments = []
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":wlan_rvr_test($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    # Running ChannelSweepTest is usually only necessary when verifying new WLAN
+    # firmware patches. Keep it out of automation; it takes too long for regular runs.
+    ":channel_sweep_test($host_toolchain)",
+    ":wlan_wmm_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/wlan/performance/ChannelSweepTest.py b/src/antlion/tests/wlan/performance/ChannelSweepTest.py
index 4c84213..db148e9 100644
--- a/src/antlion/tests/wlan/performance/ChannelSweepTest.py
+++ b/src/antlion/tests/wlan/performance/ChannelSweepTest.py
@@ -19,22 +19,22 @@
 
 from statistics import pstdev
 
-from antlion import asserts
-from antlion import context
-from antlion import utils
+from antlion import context, utils
 from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.iperf_server import IPerfResult
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import asserts, test_runner
+
 N_CAPABILITIES_DEFAULT = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
     hostapd_constants.N_CAPABILITY_SGI40,
     hostapd_constants.N_CAPABILITY_TX_STBC,
-    hostapd_constants.N_CAPABILITY_RX_STBC1
+    hostapd_constants.N_CAPABILITY_RX_STBC1,
 ]
 
 AC_CAPABILITIES_DEFAULT = [
@@ -45,7 +45,7 @@
     hostapd_constants.AC_CAPABILITY_RX_STBC_1,
     hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
     hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
+    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
 ]
 
 DEFAULT_MIN_THROUGHPUT = 0
@@ -60,15 +60,15 @@
 TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
 WEP_HEX_STRING_LENGTH = 10
 
-MEGABITS_PER_SECOND = 'Mbps'
+MEGABITS_PER_SECOND = "Mbps"
 
 
 def get_test_name(settings):
     """Retrieves the test_name value from test_settings"""
-    return settings.get('test_name')
+    return settings.get("test_name")
 
 
-class ChannelSweepTest(WifiBaseTest):
+class ChannelSweepTest(base_test.WifiBaseTest):
     """Tests channel performance and regulatory compliance..
 
     Testbed Requirement:
@@ -80,29 +80,28 @@
 
     def __init__(self, controllers):
         super().__init__(controllers)
-        if 'channel_sweep_test_params' in self.user_params:
+        if "channel_sweep_test_params" in self.user_params:
             self.time_to_wait_for_ip_addr = self.user_params[
-                'channel_sweep_test_params'].get(
-                    'time_to_wait_for_ip_addr',
-                    DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR)
+                "channel_sweep_test_params"
+            ].get("time_to_wait_for_ip_addr", DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR)
         else:
             self.time_to_wait_for_ip_addr = DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
 
-        self.android_devices = getattr(self, 'android_devices', [])
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
+            self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
+        self.android_devices = getattr(self, "android_devices", [])
 
         self.access_point = self.access_points[0]
         self.access_point.stop_all_aps()
@@ -111,17 +110,19 @@
         self.iperf_client = None
 
         self.channel_sweep_test_params = self.user_params.get(
-            'channel_sweep_test_params', {})
+            "channel_sweep_test_params", {}
+        )
         # Allows users to skip the iperf throughput measurements, just verifying
         # association.
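+        # For example, a hypothetical test-params entry such as:
+        #   channel_sweep_test_params:
+        #     skip_performance: true
+        # would skip throughput measurement and only verify association.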
-        if not self.channel_sweep_test_params.get('skip_performance'):
+        if not self.channel_sweep_test_params.get("skip_performance"):
             try:
                 self.iperf_server = self.iperf_servers[0]
                 self.iperf_server.start()
             except AttributeError:
                 self.log.warn(
-                    'Missing iperf config. Throughput cannot be measured, so only '
-                    'association will be tested.')
+                    "Missing iperf config. Throughput cannot be measured, so only "
+                    "association will be tested."
+                )
 
             if hasattr(self, "iperf_clients") and self.iperf_clients:
                 self.iperf_client = self.iperf_clients[0]
@@ -133,8 +134,10 @@
     def teardown_class(self):
         super().teardown_class()
         output_path = context.get_current_context().get_base_output_path()
-        regulatory_save_path = '%s/ChannelSweepTest/%s' % (
-            output_path, "regulatory_results.txt")
+        regulatory_save_path = "%s/ChannelSweepTest/%s" % (
+            output_path,
+            "regulatory_results.txt",
+        )
         f = open(regulatory_save_path, "w")
         f.write(self.regulatory_results)
         f.close()
@@ -178,48 +181,53 @@
         Args:
             country_code: string, the 2 character country code to set
         """
-        self.log.info('Setting DUT country code to %s' % country_code)
+        self.log.info("Setting DUT country code to %s" % country_code)
         country_code_response = self.dut.device.sl4f.regulatory_region_lib.setRegion(
-            country_code)
-        if country_code_response.get('error'):
+            country_code
+        )
+        if country_code_response.get("error"):
             raise EnvironmentError(
-                'Failed to set country code (%s) on DUT. Error: %s' %
-                (country_code, country_code_response['error']))
+                "Failed to set country code (%s) on DUT. Error: %s"
+                % (country_code, country_code_response["error"])
+            )
 
-        self.log.info('Verifying DUT country code was correctly set to %s.' %
-                      country_code)
+        self.log.info(
+            "Verifying DUT country code was correctly set to %s." % country_code
+        )
         phy_ids_response = self.dut.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get('error'):
-            raise ConnectionError('Failed to get phy ids from DUT. Error: %s' %
-                                  (country_code, phy_ids_response['error']))
+        if phy_ids_response.get("error"):
+            raise ConnectionError(
+                "Failed to get phy ids from DUT. Error: %s"
+                % (country_code, phy_ids_response["error"])
+            )
 
         end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
         while time.time() < end_time:
-            for id in phy_ids_response['result']:
-                get_country_response = self.dut.device.sl4f.wlan_lib.wlanGetCountry(
-                    id)
-                if get_country_response.get('error'):
+            for id in phy_ids_response["result"]:
+                get_country_response = self.dut.device.sl4f.wlan_lib.wlanGetCountry(id)
+                if get_country_response.get("error"):
                     raise ConnectionError(
-                        'Failed to query PHY ID (%s) for country. Error: %s' %
-                        (id, get_country_response['error']))
+                        "Failed to query PHY ID (%s) for country. Error: %s"
+                        % (id, get_country_response["error"])
+                    )
 
-                set_code = ''.join([
-                    chr(ascii_char)
-                    for ascii_char in get_country_response['result']
-                ])
+                set_code = "".join(
+                    [chr(ascii_char) for ascii_char in get_country_response["result"]]
+                )
                 if set_code != country_code:
                     self.log.debug(
-                        'PHY (id: %s) has incorrect country code set. '
-                        'Expected: %s, Got: %s' % (id, country_code, set_code))
+                        "PHY (id: %s) has incorrect country code set. "
+                        "Expected: %s, Got: %s" % (id, country_code, set_code)
+                    )
                     break
             else:
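+                # for-else: runs only if every PHY reported the expected code.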
-                self.log.info('All PHYs have expected country code (%s)' %
-                              country_code)
+                self.log.info("All PHYs have expected country code (%s)" % country_code)
                 break
             time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
         else:
-            raise EnvironmentError('Failed to set DUT country code to %s.' %
-                                   country_code)
+            raise EnvironmentError(
+                "Failed to set DUT country code to %s." % country_code
+            )
 
     def setup_ap(self, channel, channel_bandwidth, security_profile=None):
         """Start network on AP with basic configuration.
@@ -244,37 +252,43 @@
             n_capabilities = N_CAPABILITIES_DEFAULT + [
                 hostapd_constants.N_CAPABILITY_HT20
             ]
-        elif (channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_40MHZ or
-              channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_80MHZ):
+        elif (
+            channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_40MHZ
+            or channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_80MHZ
+        ):
             if hostapd_config.ht40_plus_allowed(channel):
                 extended_channel = [hostapd_constants.N_CAPABILITY_HT40_PLUS]
             elif hostapd_config.ht40_minus_allowed(channel):
                 extended_channel = [hostapd_constants.N_CAPABILITY_HT40_MINUS]
             else:
-                raise ValueError('Invalid Channel: %s' % channel)
+                raise ValueError("Invalid Channel: %s" % channel)
             n_capabilities = N_CAPABILITIES_DEFAULT + extended_channel
         else:
-            raise ValueError('Invalid Bandwidth: %s' % channel_bandwidth)
+            raise ValueError("Invalid Bandwidth: %s" % channel_bandwidth)
         ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         try:
-            setup_ap(access_point=self.access_point,
-                     profile_name='whirlwind',
-                     channel=channel,
-                     security=security_profile,
-                     n_capabilities=n_capabilities,
-                     ac_capabilities=None,
-                     force_wmm=True,
-                     ssid=ssid,
-                     vht_bandwidth=vht_bandwidth,
-                     setup_bridge=True)
+            setup_ap(
+                access_point=self.access_point,
+                profile_name="whirlwind",
+                channel=channel,
+                security=security_profile,
+                n_capabilities=n_capabilities,
+                ac_capabilities=None,
+                force_wmm=True,
+                ssid=ssid,
+                vht_bandwidth=vht_bandwidth,
+                setup_bridge=True,
+            )
         except Exception as err:
             raise ConnectionError(
-                'Failed to setup ap on channel: %s, channel bandwidth: %smhz. '
-                'Error: %s' % (channel, channel_bandwidth, err))
+                "Failed to setup ap on channel: %s, channel bandwidth: %smhz. "
+                "Error: %s" % (channel, channel_bandwidth, err)
+            )
         else:
             self.log.info(
-                'Network (ssid: %s) up on channel %s w/ channel bandwidth %smhz'
-                % (ssid, channel, channel_bandwidth))
+                "Network (ssid: %s) up on channel %s w/ channel bandwidth %smhz"
+                % (ssid, channel, channel_bandwidth)
+            )
 
         return ssid
 
@@ -305,26 +319,26 @@
                 device_addresses = device.get_interface_ip_addresses(interface)
             else:
                 device_addresses = device.get_interface_ip_addresses(
-                    device.test_interface)
+                    device.test_interface
+                )
 
-            if device_addresses['ipv4_private']:
-                for ip_addr in device_addresses['ipv4_private']:
+            if device_addresses["ipv4_private"]:
+                for ip_addr in device_addresses["ipv4_private"]:
                     if utils.ip_in_subnet(ip_addr, subnet):
                         return ip_addr
                     else:
                         self.log.debug(
-                            'Device has an ip address (%s), but it is not in '
-                            'subnet %s' % (ip_addr, subnet))
+                            "Device has an ip address (%s), but it is not in "
+                            "subnet %s" % (ip_addr, subnet)
+                        )
             else:
-                self.log.debug(
-                    'Device does not have a valid ip address. Retrying.')
+                self.log.debug("Device does not have a valid ip address. Retrying.")
             time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        raise ConnectionError('Device failed to get an ip address.')
+        raise ConnectionError("Device failed to get an ip address.")
 
-    def get_iperf_throughput(self,
-                             iperf_server_address,
-                             iperf_client_address,
-                             reverse=False):
+    def get_iperf_throughput(
+        self, iperf_server_address, iperf_client_address, reverse=False
+    ):
         """Run iperf between client and server and get the throughput.
 
         Args:
@@ -338,31 +352,37 @@
         """
         if reverse:
             self.log.info(
-                'Running IPerf traffic from server (%s) to dut (%s).' %
-                (iperf_server_address, iperf_client_address))
+                "Running IPerf traffic from server (%s) to dut (%s)."
+                % (iperf_server_address, iperf_client_address)
+            )
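+            # iperf args "-i 1 -t 10 -R -J": report every 1 s, run for 10 s, reverse
+            # the direction (server transmits to the DUT), and emit JSON output.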
             iperf_results_file = self.iperf_client.start(
                 iperf_server_address,
-                '-i 1 -t 10 -R -J',
-                'channel_sweep_rx',
-                timeout=DEFAULT_IPERF_TIMEOUT)
+                "-i 1 -t 10 -R -J",
+                "channel_sweep_rx",
+                timeout=DEFAULT_IPERF_TIMEOUT,
+            )
         else:
             self.log.info(
-                'Running IPerf traffic from dut (%s) to server (%s).' %
-                (iperf_client_address, iperf_server_address))
+                "Running IPerf traffic from dut (%s) to server (%s)."
+                % (iperf_client_address, iperf_server_address)
+            )
             iperf_results_file = self.iperf_client.start(
                 iperf_server_address,
-                '-i 1 -t 10 -J',
-                'channel_sweep_tx',
-                timeout=DEFAULT_IPERF_TIMEOUT)
+                "-i 1 -t 10 -J",
+                "channel_sweep_tx",
+                timeout=DEFAULT_IPERF_TIMEOUT,
+            )
         if iperf_results_file:
             iperf_results = IPerfResult(
-                iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND)
+                iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND
+            )
             return iperf_results.avg_send_rate
         else:
             return IPERF_NO_THROUGHPUT_VALUE
 
-    def log_to_file_and_throughput_data(self, channel, channel_bandwidth,
-                                        tx_throughput, rx_throughput):
+    def log_to_file_and_throughput_data(
+        self, channel, channel_bandwidth, tx_throughput, rx_throughput
+    ):
         """Write performance info to csv file and to throughput data.
 
         Args:
@@ -371,20 +391,18 @@
             tx_throughput: float, throughput value from dut to iperf server
             rx_throughput: float, throughput value from iperf server to dut
         """
-        test_name = self.throughput_data['test']
+        test_name = self.throughput_data["test"]
         output_path = context.get_current_context().get_base_output_path()
-        log_path = '%s/ChannelSweepTest/%s' % (output_path, test_name)
+        log_path = "%s/ChannelSweepTest/%s" % (output_path, test_name)
         if not os.path.exists(log_path):
             os.makedirs(log_path)
-        log_file = '%s/%s_%smhz.csv' % (log_path, test_name, channel_bandwidth)
-        self.log.info('Writing IPerf results for %s to %s' %
-                      (test_name, log_file))
-        with open(log_file, 'a') as csv_file:
-            csv_file.write('%s,%s,%s\n' %
-                           (channel, tx_throughput, rx_throughput))
-        self.throughput_data['results'][str(channel)] = {
-            'tx_throughput': tx_throughput,
-            'rx_throughput': rx_throughput
+        log_file = "%s/%s_%smhz.csv" % (log_path, test_name, channel_bandwidth)
+        self.log.info("Writing IPerf results for %s to %s" % (test_name, log_file))
+        with open(log_file, "a") as csv_file:
+            csv_file.write("%s,%s,%s\n" % (channel, tx_throughput, rx_throughput))
+        self.throughput_data["results"][str(channel)] = {
+            "tx_throughput": tx_throughput,
+            "rx_throughput": rx_throughput,
         }
 
     def write_graph(self):
@@ -401,66 +419,89 @@
             from bokeh.plotting import output_file
             from bokeh.plotting import save
         except ImportError as e:
-            self.log.warn("bokeh is not installed: skipping creation of graphs. "
-                          "Note CSV files are still available. If graphs are "
-                          "desired, install antlion with the \"bokeh\" feature.")
+            self.log.warn(
+                "bokeh is not installed: skipping creation of graphs. "
+                "Note CSV files are still available. If graphs are "
+                'desired, install antlion with the "bokeh" feature.'
+            )
             return
 
         output_path = context.get_current_context().get_base_output_path()
-        test_name = self.throughput_data['test']
-        channel_bandwidth = self.throughput_data['channel_bandwidth']
-        output_file_name = '%s/ChannelSweepTest/%s/%s_%smhz.html' % (
-            output_path, test_name, test_name, channel_bandwidth)
+        test_name = self.throughput_data["test"]
+        channel_bandwidth = self.throughput_data["channel_bandwidth"]
+        output_file_name = "%s/ChannelSweepTest/%s/%s_%smhz.html" % (
+            output_path,
+            test_name,
+            test_name,
+            channel_bandwidth,
+        )
         output_file(output_file_name)
         channels = []
         tx_throughputs = []
         rx_throughputs = []
-        for channel in self.throughput_data['results']:
+        for channel in self.throughput_data["results"]:
             channels.append(str(channel))
             tx_throughputs.append(
-                self.throughput_data['results'][channel]['tx_throughput'])
+                self.throughput_data["results"][channel]["tx_throughput"]
+            )
             rx_throughputs.append(
-                self.throughput_data['results'][channel]['rx_throughput'])
+                self.throughput_data["results"][channel]["rx_throughput"]
+            )
         channel_vs_throughput_data = ColumnDataSource(
-            data=dict(channels=channels,
-                      tx_throughput=tx_throughputs,
-                      rx_throughput=rx_throughputs))
-        TOOLTIPS = [('Channel', '@channels'),
-                    ('TX_Throughput', '@tx_throughput'),
-                    ('RX_Throughput', '@rx_throughput')]
-        channel_vs_throughput_graph = figure(title='Channels vs. Throughput',
-                                             x_axis_label='Channels',
-                                             x_range=channels,
-                                             y_axis_label='Throughput',
-                                             tooltips=TOOLTIPS)
-        channel_vs_throughput_graph.sizing_mode = 'stretch_both'
-        channel_vs_throughput_graph.title.align = 'center'
-        channel_vs_throughput_graph.line('channels',
-                                         'tx_throughput',
-                                         source=channel_vs_throughput_data,
-                                         line_width=2,
-                                         line_color='blue',
-                                         legend_label='TX_Throughput')
-        channel_vs_throughput_graph.circle('channels',
-                                           'tx_throughput',
-                                           source=channel_vs_throughput_data,
-                                           size=GRAPH_CIRCLE_SIZE,
-                                           color='blue')
-        channel_vs_throughput_graph.line('channels',
-                                         'rx_throughput',
-                                         source=channel_vs_throughput_data,
-                                         line_width=2,
-                                         line_color='red',
-                                         legend_label='RX_Throughput')
-        channel_vs_throughput_graph.circle('channels',
-                                           'rx_throughput',
-                                           source=channel_vs_throughput_data,
-                                           size=GRAPH_CIRCLE_SIZE,
-                                           color='red')
+            data=dict(
+                channels=channels,
+                tx_throughput=tx_throughputs,
+                rx_throughput=rx_throughputs,
+            )
+        )
+        TOOLTIPS = [
+            ("Channel", "@channels"),
+            ("TX_Throughput", "@tx_throughput"),
+            ("RX_Throughput", "@rx_throughput"),
+        ]
+        channel_vs_throughput_graph = figure(
+            title="Channels vs. Throughput",
+            x_axis_label="Channels",
+            x_range=channels,
+            y_axis_label="Throughput",
+            tooltips=TOOLTIPS,
+        )
+        channel_vs_throughput_graph.sizing_mode = "stretch_both"
+        channel_vs_throughput_graph.title.align = "center"
+        channel_vs_throughput_graph.line(
+            "channels",
+            "tx_throughput",
+            source=channel_vs_throughput_data,
+            line_width=2,
+            line_color="blue",
+            legend_label="TX_Throughput",
+        )
+        channel_vs_throughput_graph.circle(
+            "channels",
+            "tx_throughput",
+            source=channel_vs_throughput_data,
+            size=GRAPH_CIRCLE_SIZE,
+            color="blue",
+        )
+        channel_vs_throughput_graph.line(
+            "channels",
+            "rx_throughput",
+            source=channel_vs_throughput_data,
+            line_width=2,
+            line_color="red",
+            legend_label="RX_Throughput",
+        )
+        channel_vs_throughput_graph.circle(
+            "channels",
+            "rx_throughput",
+            source=channel_vs_throughput_data,
+            size=GRAPH_CIRCLE_SIZE,
+            color="red",
+        )
 
         channel_vs_throughput_graph.legend.location = "top_left"
         graph_file = save([channel_vs_throughput_graph])
-        self.log.info('Saved graph to %s' % graph_file)
+        self.log.info("Saved graph to %s" % graph_file)
 
     def verify_standard_deviation(self, max_std_dev):
         """Verifies the standard deviation of the throughput across the channels
@@ -476,34 +517,46 @@
         # If performance measurement is skipped
         if not self.iperf_server:
             return
-        self.log.info('Verifying standard deviation across channels does not '
-                      'exceed max standard deviation of %s Mb/s' % max_std_dev)
+        self.log.info(
+            "Verifying standard deviation across channels does not "
+            "exceed max standard deviation of %s Mb/s" % max_std_dev
+        )
         tx_values = []
         rx_values = []
-        for channel in self.throughput_data['results']:
-            if self.throughput_data['results'][channel][
-                    'tx_throughput'] is not None:
+        for channel in self.throughput_data["results"]:
+            if self.throughput_data["results"][channel]["tx_throughput"] is not None:
                 tx_values.append(
-                    self.throughput_data['results'][channel]['tx_throughput'])
-            if self.throughput_data['results'][channel][
-                    'rx_throughput'] is not None:
+                    self.throughput_data["results"][channel]["tx_throughput"]
+                )
+            if self.throughput_data["results"][channel]["rx_throughput"] is not None:
                 rx_values.append(
-                    self.throughput_data['results'][channel]['rx_throughput'])
+                    self.throughput_data["results"][channel]["rx_throughput"]
+                )
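+        # pstdev: population standard deviation across the per-channel results.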
         tx_std_dev = pstdev(tx_values)
         rx_std_dev = pstdev(rx_values)
         if tx_std_dev > max_std_dev or rx_std_dev > max_std_dev:
             asserts.fail(
-                'With %smhz channel bandwidth, throughput standard '
-                'deviation (tx: %s Mb/s, rx: %s Mb/s) exceeds max standard '
-                'deviation (%s Mb/s).' %
-                (self.throughput_data['channel_bandwidth'], tx_std_dev,
-                 rx_std_dev, max_std_dev))
+                "With %smhz channel bandwidth, throughput standard "
+                "deviation (tx: %s Mb/s, rx: %s Mb/s) exceeds max standard "
+                "deviation (%s Mb/s)."
+                % (
+                    self.throughput_data["channel_bandwidth"],
+                    tx_std_dev,
+                    rx_std_dev,
+                    max_std_dev,
+                )
+            )
         else:
             asserts.explicit_pass(
-                'Throughput standard deviation (tx: %s Mb/s, rx: %s Mb/s) '
-                'with %smhz channel bandwidth does not exceed maximum (%s Mb/s).'
-                % (tx_std_dev, rx_std_dev,
-                   self.throughput_data['channel_bandwidth'], max_std_dev))
+                "Throughput standard deviation (tx: %s Mb/s, rx: %s Mb/s) "
+                "with %smhz channel bandwidth does not exceed maximum (%s Mb/s)."
+                % (
+                    tx_std_dev,
+                    rx_std_dev,
+                    self.throughput_data["channel_bandwidth"],
+                    max_std_dev,
+                )
+            )
 
     def run_channel_performance_tests(self, settings):
         """Test function for running channel performance tests. Used by both
@@ -557,18 +610,16 @@
                         country_code='US',
                         base_test_name='test_us'))
         """
-        test_channels = settings['test_channels']
-        test_channel_bandwidth = settings['test_channel_bandwidth']
-        test_security = settings.get('test_security', None)
-        test_name = settings.get('test_name', self.test_name)
-        base_test_name = settings.get('base_test_name', 'test')
-        min_tx_throughput = settings.get('min_tx_throughput',
-                                         DEFAULT_MIN_THROUGHPUT)
-        min_rx_throughput = settings.get('min_rx_throughput',
-                                         DEFAULT_MIN_THROUGHPUT)
-        max_std_dev = settings.get('max_std_dev', DEFAULT_MAX_STD_DEV)
-        country_code = settings.get('country_code')
-        country_name = settings.get('country_name')
+        test_channels = settings["test_channels"]
+        test_channel_bandwidth = settings["test_channel_bandwidth"]
+        test_security = settings.get("test_security", None)
+        test_name = settings.get("test_name", self.test_name)
+        base_test_name = settings.get("base_test_name", "test")
+        min_tx_throughput = settings.get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT)
+        min_rx_throughput = settings.get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT)
+        max_std_dev = settings.get("max_std_dev", DEFAULT_MAX_STD_DEV)
+        country_code = settings.get("country_code")
+        country_name = settings.get("country_name")
         country_label = None
 
         if country_code:
@@ -576,32 +627,36 @@
             self.set_dut_country_code(country_code)
         elif country_name:
             country_label = country_name
-            code = hostapd_constants.COUNTRY_CODE[country_name]['country_code']
+            code = hostapd_constants.COUNTRY_CODE[country_name]["country_code"]
             self.set_dut_country_code(code)
 
         self.throughput_data = {
-            'test': test_name,
-            'channel_bandwidth': test_channel_bandwidth,
-            'results': {}
+            "test": test_name,
+            "channel_bandwidth": test_channel_bandwidth,
+            "results": {},
         }
         test_list = []
         for channel in test_channels:
-            sub_test_name = 'test_%schannel_%s_%smhz_%s_performance' % (
-                '%s_' % country_label if country_label else '', channel,
+            sub_test_name = "test_%schannel_%s_%smhz_%s_performance" % (
+                "%s_" % country_label if country_label else "",
+                channel,
                 test_channel_bandwidth,
-                test_security if test_security else 'open')
-            test_list.append({
-                'test_name': sub_test_name,
-                'channel': int(channel),
-                'channel_bandwidth': int(test_channel_bandwidth),
-                'security': test_security,
-                'min_tx_throughput': min_tx_throughput,
-                'min_rx_throughput': min_rx_throughput
-            })
-        self.run_generated_testcases(self.get_channel_performance,
-                                     settings=test_list,
-                                     name_func=get_test_name)
-        self.log.info('Channel tests completed.')
+                test_security if test_security else "open",
+            )
+            test_list.append(
+                {
+                    "test_name": sub_test_name,
+                    "channel": int(channel),
+                    "channel_bandwidth": int(test_channel_bandwidth),
+                    "security": test_security,
+                    "min_tx_throughput": min_tx_throughput,
+                    "min_rx_throughput": min_rx_throughput,
+                }
+            )
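+        # One generated sub-test runs per channel; get_test_name pulls each name
+        # from the "test_name" entry assembled above.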
+        self.run_generated_testcases(
+            self.get_channel_performance, settings=test_list, name_func=get_test_name
+        )
+        self.log.info("Channel tests completed.")
         self.write_graph()
         self.verify_standard_deviation(max_std_dev)
 
@@ -624,73 +679,84 @@
             TestFailure, if throughput (either direction) is less than
                 the directions given minimum throughput threshold.
         """
-        channel = settings['channel']
-        channel_bandwidth = settings['channel_bandwidth']
-        security = settings['security']
-        test_name = settings['test_name']
-        min_tx_throughput = settings['min_tx_throughput']
-        min_rx_throughput = settings['min_rx_throughput']
+        channel = settings["channel"]
+        channel_bandwidth = settings["channel_bandwidth"]
+        security = settings["security"]
+        test_name = settings["test_name"]
+        min_tx_throughput = settings["min_tx_throughput"]
+        min_rx_throughput = settings["min_rx_throughput"]
         if security:
             if security == hostapd_constants.WEP_STRING:
                 password = utils.rand_hex_str(WEP_HEX_STRING_LENGTH)
             else:
-                password = utils.rand_ascii_str(
-                    hostapd_constants.MIN_WPA_PSK_LENGTH)
-            security_profile = Security(security_mode=security,
-                                        password=password)
-            target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security)
+                password = utils.rand_ascii_str(hostapd_constants.MIN_WPA_PSK_LENGTH)
+            security_profile = Security(security_mode=security, password=password)
+            target_security = (
+                hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                    security
+                )
+            )
         else:
             password = None
             security_profile = None
             target_security = None
         ssid = self.setup_ap(channel, channel_bandwidth, security_profile)
-        associated = self.dut.associate(ssid,
-                                        target_pwd=password,
-                                        target_security=target_security)
+        associated = self.dut.associate(
+            ssid, target_pwd=password, target_security=target_security
+        )
         if not associated:
             if self.iperf_server:
-                self.log_to_file_and_throughput_data(channel,
-                                                     channel_bandwidth, None,
-                                                     None)
-            asserts.fail('Device failed to associate with network %s' % ssid)
-        self.log.info('DUT (%s) connected to network %s.' %
-                      (self.dut.device.ip, ssid))
+                self.log_to_file_and_throughput_data(
+                    channel, channel_bandwidth, None, None
+                )
+            asserts.fail("Device failed to associate with network %s" % ssid)
+        self.log.info("DUT (%s) connected to network %s." % (self.dut.device.ip, ssid))
         if self.iperf_server:
             self.iperf_server.renew_test_interface_ip_address()
             self.log.info(
-                'Getting ip address for iperf server. Will retry for %s seconds.'
-                % self.time_to_wait_for_ip_addr)
+                "Getting ip address for iperf server. Will retry for %s seconds."
+                % self.time_to_wait_for_ip_addr
+            )
             iperf_server_address = self.get_and_verify_iperf_address(
-                channel, self.iperf_server)
+                channel, self.iperf_server
+            )
             self.log.info(
-                'Getting ip address for DUT. Will retry for %s seconds.' %
-                self.time_to_wait_for_ip_addr)
+                "Getting ip address for DUT. Will retry for %s seconds."
+                % self.time_to_wait_for_ip_addr
+            )
             iperf_client_address = self.get_and_verify_iperf_address(
-                channel, self.dut.device, self.iperf_client.test_interface)
-            tx_throughput = self.get_iperf_throughput(iperf_server_address,
-                                                      iperf_client_address)
-            rx_throughput = self.get_iperf_throughput(iperf_server_address,
-                                                      iperf_client_address,
-                                                      reverse=True)
-            self.log_to_file_and_throughput_data(channel, channel_bandwidth,
-                                                 tx_throughput, rx_throughput)
-            self.log.info('Throughput (tx, rx): (%s Mb/s, %s Mb/s), '
-                          'Minimum threshold (tx, rx): (%s Mb/s, %s Mb/s)' %
-                          (tx_throughput, rx_throughput, min_tx_throughput,
-                           min_rx_throughput))
+                channel, self.dut.device, self.iperf_client.test_interface
+            )
+            tx_throughput = self.get_iperf_throughput(
+                iperf_server_address, iperf_client_address
+            )
+            rx_throughput = self.get_iperf_throughput(
+                iperf_server_address, iperf_client_address, reverse=True
+            )
+            self.log_to_file_and_throughput_data(
+                channel, channel_bandwidth, tx_throughput, rx_throughput
+            )
+            self.log.info(
+                "Throughput (tx, rx): (%s Mb/s, %s Mb/s), "
+                "Minimum threshold (tx, rx): (%s Mb/s, %s Mb/s)"
+                % (tx_throughput, rx_throughput, min_tx_throughput, min_rx_throughput)
+            )
             base_message = (
-                'Actual throughput (on channel: %s, channel bandwidth: '
-                '%s, security: %s)' % (channel, channel_bandwidth, security))
-            if (not tx_throughput or not rx_throughput
-                    or tx_throughput < min_tx_throughput
-                    or rx_throughput < min_rx_throughput):
-                asserts.fail('%s below the minimum threshold.' % base_message)
-            asserts.explicit_pass('%s above the minimum threshold.' %
-                                  base_message)
+                "Actual throughput (on channel: %s, channel bandwidth: "
+                "%s, security: %s)" % (channel, channel_bandwidth, security)
+            )
+            if (
+                not tx_throughput
+                or not rx_throughput
+                or tx_throughput < min_tx_throughput
+                or rx_throughput < min_rx_throughput
+            ):
+                asserts.fail("%s below the minimum threshold." % base_message)
+            asserts.explicit_pass("%s above the minimum threshold." % base_message)
         else:
             asserts.explicit_pass(
-                'Association test pass. No throughput measurement taken.')
+                "Association test pass. No throughput measurement taken."
+            )
 
     def verify_regulatory_compliance(self, settings):
         """Test function for regulatory compliance tests. Verify device complies
@@ -711,335 +777,402 @@
                     parent test case from the config file. In explicit tests,
                     this is not necessary.
         """
-        country_name = settings.get('country_name')
-        country_code = settings.get('country_code')
+        country_name = settings.get("country_name")
+        country_code = settings.get("country_code")
         if not (country_code or country_name):
-            raise ValueError('No country code or name provided.')
+            raise ValueError("No country code or name provided.")
 
-        test_channels = settings.get('test_channels',
-                                     hostapd_constants.ALL_CHANNELS)
-        allowed_channels = settings['allowed_channels']
+        test_channels = settings.get("test_channels", hostapd_constants.ALL_CHANNELS)
+        allowed_channels = settings["allowed_channels"]
 
-        base_test_name = settings.get('base_test_name', 'test_compliance')
+        base_test_name = settings.get("base_test_name", "test_compliance")
 
         if country_code:
             code = country_code
         else:
-            code = hostapd_constants.COUNTRY_CODE[country_name]['country_code']
+            code = hostapd_constants.COUNTRY_CODE[country_name]["country_code"]
 
         self.set_dut_country_code(code)
 
         test_list = []
         for channel in test_channels:
             for channel_bandwidth in test_channels[channel]:
-                sub_test_name = '%s_channel_%s_%smhz' % (
-                    base_test_name, channel, channel_bandwidth)
-                should_associate = (channel in allowed_channels
-                                    and channel_bandwidth
-                                    in allowed_channels[channel])
+                sub_test_name = "%s_channel_%s_%smhz" % (
+                    base_test_name,
+                    channel,
+                    channel_bandwidth,
+                )
+                should_associate = (
+                    channel in allowed_channels
+                    and channel_bandwidth in allowed_channels[channel]
+                )
                 # Note: these int conversions because when these tests are
                 # imported via JSON, they may be strings since the channels
                 # will be keys. This makes the json/list test_channels param
                 # behave exactly like the in code dict/set test_channels.
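+                # Illustrative example (hypothetical values): a JSON-supplied
+                # test_channels of {"36": [20, 40]} behaves the same as an
+                # in-code {36: {20, 40}} once channel and bandwidth are cast
+                # to int here.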
-                test_list.append({
-                    'country_code': code,
-                    'channel': int(channel),
-                    'channel_bandwidth': int(channel_bandwidth),
-                    'should_associate': should_associate,
-                    'test_name': sub_test_name
-                })
-        self.run_generated_testcases(test_func=self.verify_channel_compliance,
-                                     settings=test_list,
-                                     name_func=get_test_name)
+                test_list.append(
+                    {
+                        "country_code": code,
+                        "channel": int(channel),
+                        "channel_bandwidth": int(channel_bandwidth),
+                        "should_associate": should_associate,
+                        "test_name": sub_test_name,
+                    }
+                )
+        self.run_generated_testcases(
+            test_func=self.verify_channel_compliance,
+            settings=test_list,
+            name_func=get_test_name,
+        )
 
     def verify_channel_compliance(self, settings):
         """Verify device complies with provided regulatory requirements for a
-        specific channel and channel bandwidth. Run with generated test cases
-        in the verify_regulatory_compliance parent test.
-_
-        Args:
-            settings: see verify_regulatory_compliance`
+        specific channel and channel bandwidth. Run with generated test cases
+        in the verify_regulatory_compliance parent test.
+
+        Args:
+            settings: see verify_regulatory_compliance
         """
-        channel = settings['channel']
-        channel_bandwidth = settings['channel_bandwidth']
-        code = settings['country_code']
-        should_associate = settings['should_associate']
+        channel = settings["channel"]
+        channel_bandwidth = settings["channel_bandwidth"]
+        code = settings["country_code"]
+        should_associate = settings["should_associate"]
 
         ssid = self.setup_ap(channel, channel_bandwidth)
 
         self.log.info(
-            'Attempting to associate with network (%s) on channel %s @ %smhz. '
-            'Expected behavior: %s' %
-            (ssid, channel, channel_bandwidth, 'Device should associate'
-             if should_associate else 'Device should NOT associate.'))
+            "Attempting to associate with network (%s) on channel %s @ %smhz. "
+            "Expected behavior: %s"
+            % (
+                ssid,
+                channel,
+                channel_bandwidth,
+                "Device should associate"
+                if should_associate
+                else "Device should NOT associate.",
+            )
+        )
 
         associated = self.dut.associate(ssid)
 
         regulatory_result_marker = "REGTRACKER: %s,%s,%s,%s,%s" % (
-            code, channel, '2.4' if channel < 36 else '5', channel_bandwidth,
-            'c' if associated else 'nc')
+            code,
+            channel,
+            "2.4" if channel < 36 else "5",
+            channel_bandwidth,
+            "c" if associated else "nc",
+        )
         self.regulatory_results += regulatory_result_marker + "\n"
         self.log.info(regulatory_result_marker)
 
         if associated == should_associate:
             asserts.explicit_pass(
-                'Device complied with %s regulatory requirement for channel %s '
-                ' with channel bandwidth %smhz. %s' %
-                (code, channel, channel_bandwidth,
-                 'Associated.' if associated else 'Refused to associate.'))
+                "Device complied with %s regulatory requirement for channel %s "
+                " with channel bandwidth %smhz. %s"
+                % (
+                    code,
+                    channel,
+                    channel_bandwidth,
+                    "Associated." if associated else "Refused to associate.",
+                )
+            )
         else:
             asserts.fail(
-                'Device failed compliance with regulatory domain %s for '
-                'channel %s with channel bandwidth %smhz. Expected: %s, Got: %s'
-                % (code, channel, channel_bandwidth, 'Should associate'
-                   if should_associate else 'Should not associate',
-                   'Associated' if associated else 'Did not associate'))
+                "Device failed compliance with regulatory domain %s for "
+                "channel %s with channel bandwidth %smhz. Expected: %s, Got: %s"
+                % (
+                    code,
+                    channel,
+                    channel_bandwidth,
+                    "Should associate" if should_associate else "Should not associate",
+                    "Associated" if associated else "Did not associate",
+                )
+            )
 
     # Helper functions to allow explicit tests throughput and standard deviation
     # thresholds to be passed in via config.
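+    # Illustrative entry in the ACTS config's "channel_sweep_test_params"
+    # (values are hypothetical; tests without an entry fall back to
+    # DEFAULT_MIN_THROUGHPUT and DEFAULT_MAX_STD_DEV):
+    #   "channel_sweep_test_params": {
+    #       "test_us_20mhz_open_channel_performance": {
+    #           "min_tx_throughput": 10,
+    #           "min_rx_throughput": 10,
+    #           "min_std_dev": 5
+    #       }
+    #   }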
     def _get_min_tx_throughput(self, test_name):
-        return self.user_params.get('channel_sweep_test_params',
-                                    {}).get(test_name,
-                                            {}).get('min_tx_throughput',
-                                                    DEFAULT_MIN_THROUGHPUT)
+        return (
+            self.user_params.get("channel_sweep_test_params", {})
+            .get(test_name, {})
+            .get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT)
+        )
 
     def _get_min_rx_throughput(self, test_name):
-        return self.user_params.get('channel_sweep_test_params',
-                                    {}).get(test_name,
-                                            {}).get('min_rx_throughput',
-                                                    DEFAULT_MIN_THROUGHPUT)
+        return (
+            self.user_params.get("channel_sweep_test_params", {})
+            .get(test_name, {})
+            .get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT)
+        )
 
     def _get_max_std_dev(self, test_name):
-        return self.user_params.get('channel_sweep_test_params',
-                                    {}).get(test_name,
-                                            {}).get('min_std_dev',
-                                                    DEFAULT_MAX_STD_DEV)
+        return (
+            self.user_params.get("channel_sweep_test_params", {})
+            .get(test_name, {})
+            .get("min_std_dev", DEFAULT_MAX_STD_DEV)
+        )
 
     # Channel Performance of US Channels: 570 Test Cases
     # 36 Test Cases
     def test_us_20mhz_open_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G,
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 35 Test Cases
     def test_us_40mhz_open_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 24 Test Cases
     def test_us_80mhz_open_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 36 Test Cases
     def test_us_20mhz_wep_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WEP_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G,
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
+                test_security=hostapd_constants.WEP_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 35 Test Cases
     def test_us_40mhz_wep_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WEP_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
+                test_security=hostapd_constants.WEP_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 24 Test Cases
     def test_us_80mhz_wep_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WEP_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
+                test_security=hostapd_constants.WEP_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 36 Test Cases
     def test_us_20mhz_wpa_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G,
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
+                test_security=hostapd_constants.WPA_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 35 Test Cases
     def test_us_40mhz_wpa_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
+                test_security=hostapd_constants.WPA_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 24 Test Cases
     def test_us_80mhz_wpa_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
+                test_security=hostapd_constants.WPA_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 36 Test Cases
     def test_us_20mhz_wpa2_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA2_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G,
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
+                test_security=hostapd_constants.WPA2_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 35 Test Cases
     def test_us_40mhz_wpa2_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA2_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
+                test_security=hostapd_constants.WPA2_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 24 Test Cases
     def test_us_80mhz_wpa2_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA2_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
+                test_security=hostapd_constants.WPA2_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 36 Test Cases
     def test_us_20mhz_wpa_wpa2_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA_MIXED_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G,
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
+                test_security=hostapd_constants.WPA_MIXED_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 35 Test Cases
     def test_us_40mhz_wpa_wpa2_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA_MIXED_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
+                test_security=hostapd_constants.WPA_MIXED_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 24 Test Cases
     def test_us_80mhz_wpa_wpa2_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA_MIXED_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
+                test_security=hostapd_constants.WPA_MIXED_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 36 Test Cases
     def test_us_20mhz_wpa3_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA3_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G,
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
+                test_security=hostapd_constants.WPA3_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 35 Test Cases
     def test_us_40mhz_wpa3_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA3_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_2G
+                + hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
+                test_security=hostapd_constants.WPA3_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     # 24 Test Cases
     def test_us_80mhz_wpa3_channel_performance(self):
         self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA3_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
+            dict(
+                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
+                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
+                test_security=hostapd_constants.WPA3_STRING,
+                base_test_name=self.test_name,
+                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
+                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
+                max_std_dev=self._get_max_std_dev(self.test_name),
+            )
+        )
 
     def test_channel_performance_debug(self):
         """Run channel performance test cases from the ACTS config file.
@@ -1063,14 +1196,18 @@
 
         """
         asserts.skip_if(
-            'debug_channel_performance_tests'
-            not in self.user_params.get('channel_sweep_test_params', {}),
-            'No custom channel performance tests provided in config.')
-        base_tests = self.user_params['channel_sweep_test_params'][
-            'debug_channel_performance_tests']
-        self.run_generated_testcases(self.run_channel_performance_tests,
-                                     settings=base_tests,
-                                     name_func=get_test_name)
+            "debug_channel_performance_tests"
+            not in self.user_params.get("channel_sweep_test_params", {}),
+            "No custom channel performance tests provided in config.",
+        )
+        base_tests = self.user_params["channel_sweep_test_params"][
+            "debug_channel_performance_tests"
+        ]
+        self.run_generated_testcases(
+            self.run_channel_performance_tests,
+            settings=base_tests,
+            name_func=get_test_name,
+        )
 
     def test_regulatory_compliance(self):
         """Run regulatory compliance test case from the ACTS config file.
@@ -1096,11 +1233,22 @@
         }
         """
         asserts.skip_if(
-            'regulatory_compliance_tests'
-            not in self.user_params.get('channel_sweep_test_params', {}),
-            'No custom regulatory compliance tests provided in config.')
-        base_tests = self.user_params['channel_sweep_test_params'][
-            'regulatory_compliance_tests']
-        self.run_generated_testcases(self.verify_regulatory_compliance,
-                                     settings=base_tests,
-                                     name_func=get_test_name)
+            "regulatory_compliance_tests"
+            not in self.user_params.get("channel_sweep_test_params", {}),
+            "No custom regulatory compliance tests provided in config.",
+        )
+
+        # TODO(http://b/280442689): Add "supported_country_codes" and
+        # "unsupported_channels" to test params
+        base_tests = self.user_params["channel_sweep_test_params"][
+            "regulatory_compliance_tests"
+        ]
+        self.run_generated_testcases(
+            self.verify_regulatory_compliance,
+            settings=base_tests,
+            name_func=get_test_name,
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/WlanRvrTest.py b/src/antlion/tests/wlan/performance/WlanRvrTest.py
index 0c11c4b..ad97221 100644
--- a/src/antlion/tests/wlan/performance/WlanRvrTest.py
+++ b/src/antlion/tests/wlan/performance/WlanRvrTest.py
@@ -17,7 +17,7 @@
 import time
 import logging
 
-from antlion import asserts, context
+from antlion import context
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.radvd import Radvd
@@ -25,14 +25,16 @@
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.attenuator import get_attenuators_for_device
 from antlion.controllers.iperf_server import IPerfResult
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
+from antlion.test_utils.wifi import base_test
 from antlion.utils import rand_ascii_str
 
-AP_11ABG_PROFILE_NAME = 'whirlwind_11ag_legacy'
-REPORTING_SPEED_UNITS = 'Mbps'
+from mobly import asserts, test_runner
 
-RVR_GRAPH_SUMMARY_FILE = 'rvr_summary.html'
+AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy"
+REPORTING_SPEED_UNITS = "Mbps"
+
+RVR_GRAPH_SUMMARY_FILE = "rvr_summary.html"
 
 DAD_TIMEOUT_SEC = 30
 
@@ -46,41 +48,45 @@
         graph_data: A dictionary of the data to be graphed.
     Returns:
         A list of bokeh graph objects.
-        """
+    """
     try:
         from bokeh.plotting import ColumnDataSource
         from bokeh.plotting import figure
         from bokeh.plotting import output_file
         from bokeh.plotting import save
     except ImportError as e:
-        logging.warn("bokeh is not installed: skipping creation of graphs. "
-                     "Note CSV files are still available. If graphs are "
-                     "desired, install antlion with the \"bokeh\" feature.")
+        logging.warn(
+            "bokeh is not installed: skipping creation of graphs. "
+            "Note CSV files are still available. If graphs are "
+            'desired, install antlion with the "bokeh" feature.'
+        )
         return []
 
-    output_file('%srvr_throughput_vs_attn_%s.html' % (graph_path, test_name),
-                title=test_name)
-    throughput_vs_attn_data = ColumnDataSource(data=dict(
-        relative_attn=graph_data['throughput_vs_attn']['relative_attn'],
-        throughput=graph_data['throughput_vs_attn']['throughput']))
-    TOOLTIPS = [("Attenuation", "@relative_attn"),
-                ("Throughput", "@throughput")]
+    output_file(
+        "%srvr_throughput_vs_attn_%s.html" % (graph_path, test_name), title=test_name
+    )
+    throughput_vs_attn_data = ColumnDataSource(
+        data=dict(
+            relative_attn=graph_data["throughput_vs_attn"]["relative_attn"],
+            throughput=graph_data["throughput_vs_attn"]["throughput"],
+        )
+    )
+    TOOLTIPS = [("Attenuation", "@relative_attn"), ("Throughput", "@throughput")]
     throughput_vs_attn_graph = figure(
         title="Throughput vs Relative Attenuation (Test Case: %s)" % test_name,
-        x_axis_label=graph_data['throughput_vs_attn']['x_label'],
-        y_axis_label=graph_data['throughput_vs_attn']['y_label'],
-        x_range=graph_data['throughput_vs_attn']['relative_attn'],
-        tooltips=TOOLTIPS)
-    throughput_vs_attn_graph.sizing_mode = 'stretch_width'
-    throughput_vs_attn_graph.title.align = 'center'
-    throughput_vs_attn_graph.line('relative_attn',
-                                  'throughput',
-                                  source=throughput_vs_attn_data,
-                                  line_width=2)
-    throughput_vs_attn_graph.circle('relative_attn',
-                                    'throughput',
-                                    source=throughput_vs_attn_data,
-                                    size=10)
+        x_axis_label=graph_data["throughput_vs_attn"]["x_label"],
+        y_axis_label=graph_data["throughput_vs_attn"]["y_label"],
+        x_range=graph_data["throughput_vs_attn"]["relative_attn"],
+        tooltips=TOOLTIPS,
+    )
+    throughput_vs_attn_graph.sizing_mode = "stretch_width"
+    throughput_vs_attn_graph.title.align = "center"
+    throughput_vs_attn_graph.line(
+        "relative_attn", "throughput", source=throughput_vs_attn_data, line_width=2
+    )
+    throughput_vs_attn_graph.circle(
+        "relative_attn", "throughput", source=throughput_vs_attn_data, size=10
+    )
     save([throughput_vs_attn_graph])
     return [throughput_vs_attn_graph]
 
@@ -92,19 +98,25 @@
         csv_path: Where to put the csv file.
         csv_data: A dictionary of the data to be put in the csv file.
     """
-    csv_file_name = '%srvr_throughput_vs_attn_%s.csv' % (csv_path, test_name)
-    throughput = csv_data['throughput_vs_attn']['throughput']
-    relative_attn = csv_data['throughput_vs_attn']['relative_attn']
-    with open(csv_file_name, 'w+') as csv_fileId:
-        csv_fileId.write('%s,%s\n' %
-                         (csv_data['throughput_vs_attn']['x_label'],
-                          csv_data['throughput_vs_attn']['y_label']))
+    csv_file_name = "%srvr_throughput_vs_attn_%s.csv" % (csv_path, test_name)
+    throughput = csv_data["throughput_vs_attn"]["throughput"]
+    relative_attn = csv_data["throughput_vs_attn"]["relative_attn"]
+    with open(csv_file_name, "w+") as csv_fileId:
+        csv_fileId.write(
+            "%s,%s\n"
+            % (
+                csv_data["throughput_vs_attn"]["x_label"],
+                csv_data["throughput_vs_attn"]["y_label"],
+            )
+        )
         for csv_loop_counter in range(0, len(relative_attn)):
-            csv_fileId.write('%s,%s\n' % (int(relative_attn[csv_loop_counter]),
-                                          throughput[csv_loop_counter]))
+            csv_fileId.write(
+                "%s,%s\n"
+                % (int(relative_attn[csv_loop_counter]), throughput[csv_loop_counter])
+            )
 
 
-class WlanRvrTest(WifiBaseTest):
+class WlanRvrTest(base_test.WifiBaseTest):
     """Tests running WLAN RvR.
 
     Test Bed Requirement:
@@ -120,66 +132,73 @@
 
     def setup_class(self):
         super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.dut = create_wlan_device(self.fuchsia_devices[0])
+        elif device_type == "android_devices":
             self.dut = create_wlan_device(self.android_devices[0])
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
 
-        self.starting_attn = (self.user_params['rvr_settings'].get(
-            'starting_attn', 0))
+        self.starting_attn = self.user_params["rvr_settings"].get("starting_attn", 0)
 
-        self.ending_attn = (self.user_params['rvr_settings'].get(
-            'ending_attn', 95))
+        self.ending_attn = self.user_params["rvr_settings"].get("ending_attn", 95)
 
-        self.step_size_in_db = (self.user_params['rvr_settings'].get(
-            'step_size_in_db', 1))
+        self.step_size_in_db = self.user_params["rvr_settings"].get(
+            "step_size_in_db", 1
+        )
 
-        self.dwell_time_in_secs = (self.user_params['rvr_settings'].get(
-            'dwell_time_in_secs', 10))
+        self.dwell_time_in_secs = self.user_params["rvr_settings"].get(
+            "dwell_time_in_secs", 10
+        )
 
         self.reverse_rvr_after_forward = bool(
-            (self.user_params['rvr_settings'].get('reverse_rvr_after_forward',
-                                                  None)))
+            (self.user_params["rvr_settings"].get("reverse_rvr_after_forward", None))
+        )
 
-        self.iperf_flags = (self.user_params['rvr_settings'].get(
-            'iperf_flags', '-i 1'))
+        self.iperf_flags = self.user_params["rvr_settings"].get("iperf_flags", "-i 1")
 
-        self.iperf_flags = '%s -t %s -J' % (self.iperf_flags,
-                                            self.dwell_time_in_secs)
+        self.iperf_flags = "%s -t %s -J" % (self.iperf_flags, self.dwell_time_in_secs)
 
-        self.debug_loop_count = (self.user_params['rvr_settings'].get(
-            'debug_loop_count', 1))
+        self.debug_loop_count = self.user_params["rvr_settings"].get(
+            "debug_loop_count", 1
+        )
 
-        self.debug_pre_traffic_cmd = (self.user_params['rvr_settings'].get(
-            'debug_pre_traffic_cmd', None))
+        self.debug_pre_traffic_cmd = self.user_params["rvr_settings"].get(
+            "debug_pre_traffic_cmd", None
+        )
 
-        self.debug_post_traffic_cmd = (self.user_params['rvr_settings'].get(
-            'debug_post_traffic_cmd', None))
+        self.debug_post_traffic_cmd = self.user_params["rvr_settings"].get(
+            "debug_post_traffic_cmd", None
+        )
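+
+        # Illustrative "rvr_settings" block from the test config (user_params);
+        # the values shown mirror the defaults above, and any key may be omitted.
+        # "ending_attn" may also be the string "auto" (handled below):
+        #   "rvr_settings": {
+        #       "starting_attn": 0,
+        #       "ending_attn": 95,
+        #       "step_size_in_db": 1,
+        #       "dwell_time_in_secs": 10,
+        #       "reverse_rvr_after_forward": false,
+        #       "iperf_flags": "-i 1"
+        #   }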
 
         self.router_adv_daemon = None
 
-        if self.ending_attn == 'auto':
+        if self.ending_attn == "auto":
             self.use_auto_end = True
             self.ending_attn = 100
             if self.step_size_in_db > 2:
-                asserts.fail('When using an ending attenuation of \'auto\' '
-                             'please use a value < 2db.  Larger jumps will '
-                             'break the test reporting.')
+                asserts.fail(
+                    "When using an ending attenuation of 'auto' "
+                    "please use a value < 2db.  Larger jumps will "
+                    "break the test reporting."
+                )
 
         self.access_point = self.access_points[0]
         self.attenuators_2g = get_attenuators_for_device(
-            self.controller_configs['AccessPoint'][0]['Attenuator'],
-            self.attenuators, 'attenuator_ports_wifi_2g')
+            self.controller_configs["AccessPoint"][0]["Attenuator"],
+            self.attenuators,
+            "attenuator_ports_wifi_2g",
+        )
         self.attenuators_5g = get_attenuators_for_device(
-            self.controller_configs['AccessPoint'][0]['Attenuator'],
-            self.attenuators, 'attenuator_ports_wifi_5g')
+            self.controller_configs["AccessPoint"][0]["Attenuator"],
+            self.attenuators,
+            "attenuator_ports_wifi_5g",
+        )
 
         self.iperf_server = self.iperf_servers[0]
 
@@ -212,15 +231,18 @@
             output_path = context.get_current_context().get_base_output_path()
             test_class_name = context.get_current_context().test_class_name
 
-            output_file(f'{output_path}/{test_class_name}/rvr_summary.html',
-                        title='RvR Sumamry')
+            output_file(
+                f"{output_path}/{test_class_name}/rvr_summary.html", title="RvR Sumamry"
+            )
             save(list(self.rvr_graph_summary))
         except ImportError as e:
-            logging.warn("bokeh is not installed: skipping creation of graphs. "
-                         "Note CSV files are still available. If graphs are "
-                         "desired, install antlion with the \"bokeh\" feature.")
+            logging.warn(
+                "bokeh is not installed: skipping creation of graphs. "
+                "Note CSV files are still available. If graphs are "
+                'desired, install antlion with the "bokeh" feature.'
+            )
         except Exception as e:
-            self.log.error(f'Unable to generate RvR summary file: {e}')
+            self.log.error(f"Unable to generate RvR summary file: {e}")
 
         super().teardown_class()
 
@@ -237,7 +259,7 @@
         if self.router_adv_daemon:
             output_path = context.get_current_context().get_base_output_path()
             full_output_path = os.path.join(output_path, "radvd_log.txt")
-            radvd_log_file = open(full_output_path, 'w')
+            radvd_log_file = open(full_output_path, "w")
             radvd_log_file.write(self.router_adv_daemon.pull_logs())
             radvd_log_file.close()
             self.router_adv_daemon.stop()
@@ -267,34 +289,36 @@
         ip_address_checker_max_attempts = 3
         while ip_address_checker_counter < ip_address_checker_max_attempts:
             self.iperf_server.renew_test_interface_ip_address()
-            iperf_server_ip_addresses = (
-                self.iperf_server.get_interface_ip_addresses(
-                    self.iperf_server.test_interface))
+            iperf_server_ip_addresses = self.iperf_server.get_interface_ip_addresses(
+                self.iperf_server.test_interface
+            )
             dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-                self.dut_iperf_client.test_interface)
+                self.dut_iperf_client.test_interface
+            )
 
-            self.log.info(
-                'IPerf server IP info: {}'.format(iperf_server_ip_addresses))
-            self.log.info('DUT IP info: {}'.format(dut_ip_addresses))
+            self.log.info("IPerf server IP info: {}".format(iperf_server_ip_addresses))
+            self.log.info("DUT IP info: {}".format(dut_ip_addresses))
 
-            if not iperf_server_ip_addresses['ipv4_private']:
-                self.log.warn('Unable to get the iperf server IPv4 '
-                              'address. Retrying...')
+            if not iperf_server_ip_addresses["ipv4_private"]:
+                self.log.warn(
+                    "Unable to get the iperf server IPv4 " "address. Retrying..."
+                )
                 ip_address_checker_counter += 1
                 time.sleep(1)
                 continue
 
-            if dut_ip_addresses['ipv4_private']:
-                return iperf_server_ip_addresses['ipv4_private'][0]
+            if dut_ip_addresses["ipv4_private"]:
+                return iperf_server_ip_addresses["ipv4_private"][0]
 
-            self.log.warn('Unable to get the DUT IPv4 address starting at '
-                          'attenuation "{}". Retrying...'.format(
-                              self.starting_attn))
+            self.log.warn(
+                "Unable to get the DUT IPv4 address starting at "
+                'attenuation "{}". Retrying...'.format(self.starting_attn)
+            )
             ip_address_checker_counter += 1
             time.sleep(1)
 
         asserts.fail(
-            'IPv4 addresses are not available on both the DUT and iperf server.'
+            "IPv4 addresses are not available on both the DUT and iperf server."
         )
 
     # TODO (b/258264565): Merge with fuchsia_device wait_for_ipv6_addr.
@@ -320,25 +344,27 @@
             addrs = device.get_interface_ip_addresses(test_interface)
             now = time.time()
             elapsed = now - start
-            if addrs['ipv6_private_local']:
+            if addrs["ipv6_private_local"]:
                 # DAD has completed
-                addr = addrs['ipv6_private_local'][0]
-                self.log.info('DAD resolved with "{}" after {}s'.format(
-                    addr, elapsed))
+                addr = addrs["ipv6_private_local"][0]
+                self.log.info('DAD resolved with "{}" after {}s'.format(addr, elapsed))
                 return addr
             time.sleep(1)
         else:
             asserts.fail(
-                'Unable to acquire a private-local IPv6 address for testing '
-                'after {}s'.format(elapsed))
+                "Unable to acquire a private-local IPv6 address for testing "
+                "after {}s".format(elapsed)
+            )
 
-    def run_rvr(self,
-                ssid,
-                security_mode=None,
-                password=None,
-                band='2g',
-                traffic_dir='tx',
-                ip_version=4):
+    def run_rvr(
+        self,
+        ssid,
+        security_mode=None,
+        password=None,
+        band="2g",
+        traffic_dir="tx",
+        ip_version=4,
+    ):
         """Setups and runs the RvR test
 
         Args:
@@ -353,16 +379,17 @@
         """
         throughput = []
         relative_attn = []
-        if band == '2g':
+        if band == "2g":
             rvr_attenuators = self.attenuators_2g
-        elif band == '5g':
+        elif band == "5g":
             rvr_attenuators = self.attenuators_5g
         else:
-            raise ValueError('Invalid WLAN band specified: %s' % band)
+            raise ValueError("Invalid WLAN band specified: %s" % band)
         if ip_version == 6:
             self.router_adv_daemon = Radvd(
                 self.access_point.ssh,
-                self.access_point.interfaces.get_bridge_interface()[0])
+                self.access_point.interfaces.get_bridge_interface()[0],
+            )
             radvd_config = RadvdConfig()
             self.router_adv_daemon.start(radvd_config)
 
@@ -374,43 +401,52 @@
             associate_max_attempts = 3
             while associate_counter < associate_max_attempts:
                 if self.dut.associate(
-                        ssid,
-                        target_pwd=password,
-                        target_security=hostapd_constants.
-                        SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                            security_mode),
-                        check_connectivity=False):
+                    ssid,
+                    target_pwd=password,
+                    target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                        security_mode
+                    ),
+                    check_connectivity=False,
+                ):
                     break
                 else:
                     associate_counter += 1
             else:
-                asserts.fail('Unable to associate at starting '
-                             'attenuation: %s' % self.starting_attn)
+                asserts.fail(
+                    "Unable to associate at starting "
+                    "attenuation: %s" % self.starting_attn
+                )
 
             if ip_version == 4:
                 iperf_server_ip_address = self._wait_for_ipv4_addrs()
             elif ip_version == 6:
                 self.iperf_server.renew_test_interface_ip_address()
-                self.log.info('Waiting for iperf server to complete Duplicate '
-                              'Address Detection...')
+                self.log.info(
+                    "Waiting for iperf server to complete Duplicate "
+                    "Address Detection..."
+                )
                 iperf_server_ip_address = self._wait_for_dad(
-                    self.iperf_server, self.iperf_server.test_interface)
+                    self.iperf_server, self.iperf_server.test_interface
+                )
 
-                self.log.info('Waiting for DUT to complete Duplicate Address '
-                              'Detection for "{}"...'.format(
-                                  self.dut_iperf_client.test_interface))
-                _ = self._wait_for_dad(self.dut.device,
-                                       self.dut_iperf_client.test_interface)
+                self.log.info(
+                    "Waiting for DUT to complete Duplicate Address "
+                    'Detection for "{}"...'.format(self.dut_iperf_client.test_interface)
+                )
+                _ = self._wait_for_dad(
+                    self.dut.device, self.dut_iperf_client.test_interface
+                )
             else:
-                raise ValueError('Invalid IP version: {}'.format(ip_version))
+                raise ValueError("Invalid IP version: {}".format(ip_version))
 
-            throughput, relative_attn = (self.rvr_loop(
+            throughput, relative_attn = self.rvr_loop(
                 traffic_dir,
                 rvr_attenuators,
                 iperf_server_ip_address,
                 ip_version,
                 throughput=throughput,
-                relative_attn=relative_attn))
+                relative_attn=relative_attn,
+            )
             if self.reverse_rvr_after_forward:
                 throughput, relative_attn = self.rvr_loop(
                     traffic_dir,
@@ -422,29 +458,32 @@
                     password=password,
                     reverse=True,
                     throughput=throughput,
-                    relative_attn=relative_attn)
+                    relative_attn=relative_attn,
+                )
             self.dut.disconnect()
 
         throughput_vs_attn = {
-            'throughput': throughput,
-            'relative_attn': relative_attn,
-            'x_label': 'Attenuation(db)',
-            'y_label': 'Throughput(%s)' % REPORTING_SPEED_UNITS
+            "throughput": throughput,
+            "relative_attn": relative_attn,
+            "x_label": "Attenuation(db)",
+            "y_label": "Throughput(%s)" % REPORTING_SPEED_UNITS,
         }
-        graph_data = {'throughput_vs_attn': throughput_vs_attn}
+        graph_data = {"throughput_vs_attn": throughput_vs_attn}
         return graph_data
 
-    def rvr_loop(self,
-                 traffic_dir,
-                 rvr_attenuators,
-                 iperf_server_ip_address,
-                 ip_version,
-                 ssid=None,
-                 security_mode=None,
-                 password=None,
-                 reverse=False,
-                 throughput=None,
-                 relative_attn=None):
+    def rvr_loop(
+        self,
+        traffic_dir,
+        rvr_attenuators,
+        iperf_server_ip_address,
+        ip_version,
+        ssid=None,
+        security_mode=None,
+        password=None,
+        reverse=False,
+        throughput=None,
+        relative_attn=None,
+    ):
         """The loop that goes through each attenuation level and runs the iperf
         throughput pair.
         Args:
@@ -463,10 +502,10 @@
         Returns:
             throughput: The list of throughput data for the test.
             relative_attn: The list of attenuation data for the test.
-            """
+        """
         iperf_flags = self.iperf_flags
-        if traffic_dir == 'rx':
-            iperf_flags = '%s -R' % self.iperf_flags
+        if traffic_dir == "rx":
+            iperf_flags = "%s -R" % self.iperf_flags
         starting_attn = self.starting_attn
         ending_attn = self.ending_attn
         step_size_in_db = self.step_size_in_db
@@ -481,103 +520,123 @@
                     attenuator.set_atten(step)
             except ValueError as e:
                 self.log.error(
-                    f'{step} is beyond the max or min of the testbed '
-                    f'attenuator\'s capability. Stopping. {e}')
+                    f"{step} is beyond the max or min of the testbed "
+                    f"attenuator's capability. Stopping. {e}"
+                )
                 break
-            self.log.info('Set relative attenuation to %s db' % step)
+            self.log.info("Set relative attenuation to %s db" % step)
 
             associated = self.dut.is_connected()
             if associated:
-                self.log.info('DUT is currently associated.')
+                self.log.info("DUT is currently associated.")
             else:
-                self.log.info('DUT is not currently associated.')
+                self.log.info("DUT is not currently associated.")
 
             if reverse:
                 if not associated:
-                    self.log.info('Trying to associate at relative '
-                                  'attenuation of %s db' % step)
+                    self.log.info(
+                        "Trying to associate at relative " "attenuation of %s db" % step
+                    )
                     if self.dut.associate(
-                            ssid,
-                            target_pwd=password,
-                            target_security=hostapd_constants.
-                            SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                                security_mode),
-                            check_connectivity=False):
+                        ssid,
+                        target_pwd=password,
+                        target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                            security_mode
+                        ),
+                        check_connectivity=False,
+                    ):
                         associated = True
-                        self.log.info('Successfully associated.')
+                        self.log.info("Successfully associated.")
                     else:
                         associated = False
                         self.log.info(
-                            'Association failed. Marking a 0 %s for'
-                            ' throughput. Skipping running traffic.' %
-                            REPORTING_SPEED_UNITS)
+                            "Association failed. Marking a 0 %s for"
+                            " throughput. Skipping running traffic."
+                            % REPORTING_SPEED_UNITS
+                        )
             attn_value_inserted = False
             value_to_insert = str(step)
             while not attn_value_inserted:
                 if value_to_insert in relative_attn:
-                    value_to_insert = '%s ' % value_to_insert
+                    value_to_insert = "%s " % value_to_insert
                 else:
                     relative_attn.append(value_to_insert)
                     attn_value_inserted = True
 
             dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-                self.dut_iperf_client.test_interface)
+                self.dut_iperf_client.test_interface
+            )
             if ip_version == 4:
-                if not dut_ip_addresses['ipv4_private']:
-                    self.log.info('DUT does not have an IPv4 address. '
-                                  'Traffic attempt to be run if the server '
-                                  'is pingable.')
+                if not dut_ip_addresses["ipv4_private"]:
+                    self.log.info(
+                        "DUT does not have an IPv4 address. "
+                        "Traffic attempt to be run if the server "
+                        "is pingable."
+                    )
                 else:
-                    self.log.info('DUT has the following IPv4 address: "%s"' %
-                                  dut_ip_addresses['ipv4_private'][0])
+                    self.log.info(
+                        'DUT has the following IPv4 address: "%s"'
+                        % dut_ip_addresses["ipv4_private"][0]
+                    )
             elif ip_version == 6:
-                if not dut_ip_addresses['ipv6_private_local']:
-                    self.log.info('DUT does not have an IPv6 address. '
-                                  'Traffic attempt to be run if the server '
-                                  'is pingable.')
+                if not dut_ip_addresses["ipv6_private_local"]:
+                    self.log.info(
+                        "DUT does not have an IPv6 address. "
+                        "Traffic attempt to be run if the server "
+                        "is pingable."
+                    )
                 else:
-                    self.log.info('DUT has the following IPv6 address: "%s"' %
-                                  dut_ip_addresses['ipv6_private_local'][0])
+                    self.log.info(
+                        'DUT has the following IPv6 address: "%s"'
+                        % dut_ip_addresses["ipv6_private_local"][0]
+                    )
             server_pingable = self.dut.can_ping(iperf_server_ip_address)
             if not server_pingable:
-                self.log.info('Iperf server "%s" is not pingable. Marking '
-                              'a 0 %s for throughput. Skipping running '
-                              'traffic.' %
-                              (iperf_server_ip_address, REPORTING_SPEED_UNITS))
-            else:
-                self.log.info('Iperf server "%s" is pingable.' %
-                              iperf_server_ip_address)
-            if self.debug_pre_traffic_cmd:
-                self.log.info('\nDEBUG: Sending command \'%s\' to DUT' %
-                              self.debug_pre_traffic_cmd)
                 self.log.info(
-                    '\n%s' % self.dut.send_command(self.debug_pre_traffic_cmd))
+                    'Iperf server "%s" is not pingable. Marking '
+                    "a 0 %s for throughput. Skipping running "
+                    "traffic." % (iperf_server_ip_address, REPORTING_SPEED_UNITS)
+                )
+            else:
+                self.log.info(
+                    'Iperf server "%s" is pingable.' % iperf_server_ip_address
+                )
+            if self.debug_pre_traffic_cmd:
+                self.log.info(
+                    "\nDEBUG: Sending command '%s' to DUT" % self.debug_pre_traffic_cmd
+                )
+                self.log.info(
+                    "\n%s" % self.dut.send_command(self.debug_pre_traffic_cmd)
+                )
             if server_pingable:
-                if traffic_dir == 'tx':
-                    self.log.info('Running traffic DUT to %s at relative '
-                                  'attenuation of %s' %
-                                  (iperf_server_ip_address, step))
-                elif traffic_dir == 'rx':
-                    self.log.info('Running traffic %s to DUT at relative '
-                                  'attenuation of %s' %
-                                  (iperf_server_ip_address, step))
+                if traffic_dir == "tx":
+                    self.log.info(
+                        "Running traffic DUT to %s at relative "
+                        "attenuation of %s" % (iperf_server_ip_address, step)
+                    )
+                elif traffic_dir == "rx":
+                    self.log.info(
+                        "Running traffic %s to DUT at relative "
+                        "attenuation of %s" % (iperf_server_ip_address, step)
+                    )
                 else:
-                    raise ValueError('Invalid traffic direction')
+                    raise ValueError("Invalid traffic direction")
                 try:
-                    iperf_tag = 'decreasing'
+                    iperf_tag = "decreasing"
                     if reverse:
-                        iperf_tag = 'increasing'
+                        iperf_tag = "increasing"
                     iperf_results_file = self.dut_iperf_client.start(
                         iperf_server_ip_address,
                         iperf_flags,
-                        '%s_%s_%s' %
-                        (iperf_tag, traffic_dir, self.starting_attn),
-                        timeout=(self.dwell_time_in_secs * 2))
+                        "%s_%s_%s" % (iperf_tag, traffic_dir, self.starting_attn),
+                        timeout=(self.dwell_time_in_secs * 2),
+                    )
                 except TimeoutError as e:
                     iperf_results_file = None
                     self.log.error(
-                        f'Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for '
-                        f'throughput. {e}')
+                        f"Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for "
+                        f"throughput. {e}"
+                    )
 
                 if not iperf_results_file:
                     throughput.append(0)
@@ -585,12 +644,13 @@
                     try:
                         iperf_results = IPerfResult(
                             iperf_results_file,
-                            reporting_speed_units=REPORTING_SPEED_UNITS)
+                            reporting_speed_units=REPORTING_SPEED_UNITS,
+                        )
                         if iperf_results.error:
                             self.iperf_server.stop()
                             self.iperf_server.start()
                             self.log.error(
-                                f'Errors in iperf logs:\n{iperf_results.error}'
+                                f"Errors in iperf logs:\n{iperf_results.error}"
                             )
                         if not iperf_results.avg_send_rate:
                             throughput.append(0)
@@ -600,393 +660,478 @@
                         self.iperf_server.stop()
                         self.iperf_server.start()
                         self.log.error(
-                            f'No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} '
-                            f'for throughput: {e}')
+                            f"No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} "
+                            f"for throughput: {e}"
+                        )
                         throughput.append(0)
                     except Exception as e:
                         self.iperf_server.stop()
                         self.iperf_server.start()
                         self.log.error(
-                            f'Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for '
-                            f'throughput: {e}')
+                            f"Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for "
+                            f"throughput: {e}"
+                        )
                         self.log.error(e)
                         throughput.append(0)
 
                 self.log.info(
-                    'Iperf traffic complete. %s traffic received at '
-                    '%s %s at relative attenuation of %s db' %
-                    (traffic_dir, throughput[-1], REPORTING_SPEED_UNITS,
-                     str(relative_attn[-1]).strip()))
+                    "Iperf traffic complete. %s traffic received at "
+                    "%s %s at relative attenuation of %s db"
+                    % (
+                        traffic_dir,
+                        throughput[-1],
+                        REPORTING_SPEED_UNITS,
+                        str(relative_attn[-1]).strip(),
+                    )
+                )
 
             else:
-                self.log.debug('DUT Associated: %s' % associated)
-                self.log.debug('%s pingable: %s' %
-                               (iperf_server_ip_address, server_pingable))
+                self.log.debug("DUT Associated: %s" % associated)
+                self.log.debug(
+                    "%s pingable: %s" % (iperf_server_ip_address, server_pingable)
+                )
                 throughput.append(0)
             if self.debug_post_traffic_cmd:
-                self.log.info('\nDEBUG: Sending command \'%s\' to DUT' %
-                              self.debug_post_traffic_cmd)
                 self.log.info(
-                    '\n%s' %
-                    self.dut.send_command(self.debug_post_traffic_cmd))
+                    "\nDEBUG: Sending command '%s' to DUT" % self.debug_post_traffic_cmd
+                )
+                self.log.info(
+                    "\n%s" % self.dut.send_command(self.debug_post_traffic_cmd)
+                )
         return throughput, relative_attn
 
     def test_rvr_11ac_5g_80mhz_open_tx_ipv4(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="tx", ip_version=4)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_open_rx_ipv4(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="rx", ip_version=4)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_open_tx_ipv6(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="tx", ip_version=6)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_open_rx_ipv6(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="rx", ip_version=6)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv4(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="5g",
+            traffic_dir="tx",
+            ip_version=4,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv4(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="5g",
+            traffic_dir="rx",
+            ip_version=4,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv6(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="5g",
+            traffic_dir="tx",
+            ip_version=6,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv6(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="5g",
+            traffic_dir="rx",
+            ip_version=6,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_open_tx_ipv4(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="tx", ip_version=4)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_open_rx_ipv4(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="rx", ip_version=4)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_open_tx_ipv6(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="tx", ip_version=6)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_open_rx_ipv6(self):
         ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="rx", ip_version=6)
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_wpa2_tx_ipv4(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="2g",
+            traffic_dir="tx",
+            ip_version=4,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_wpa2_rx_ipv4(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="2g",
+            traffic_dir="rx",
+            ip_version=4,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_wpa2_tx_ipv6(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="2g",
+            traffic_dir="tx",
+            ip_version=6,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
 
     def test_rvr_11n_2g_20mhz_wpa2_rx_ipv6(self):
         ssid = rand_ascii_str(20)
         password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
+        security_profile = Security(security_mode="wpa2", password=password)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=ssid,
+            security=security_profile,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security_mode="wpa2",
+            password=password,
+            band="2g",
+            traffic_dir="rx",
+            ip_version=6,
+        )
         for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
+            self.test_name,
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        ):
             self.rvr_graph_summary.append(rvr_graph)
         write_csv_rvr_data(
             self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
+            context.get_current_context().get_full_output_path(),
+            graph_data,
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/WlanWmmTest.py b/src/antlion/tests/wlan/performance/WlanWmmTest.py
index 2094232..bf155fc 100644
--- a/src/antlion/tests/wlan/performance/WlanWmmTest.py
+++ b/src/antlion/tests/wlan/performance/WlanWmmTest.py
@@ -17,29 +17,29 @@
 import operator
 import time
 
-from antlion import asserts
-from antlion import context
-from antlion import utils
+from antlion import context, utils
 from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
+from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
 from antlion.test_utils.abstract_devices import wmm_transceiver
 from antlion.test_utils.fuchsia import wmm_test_cases
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
 from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
 
+from mobly import asserts, test_runner
+
 DEFAULT_N_CAPABILITIES_20_MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
     hostapd_constants.N_CAPABILITY_TX_STBC,
     hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_HT20
+    hostapd_constants.N_CAPABILITY_HT20,
 ]
 
 DEFAULT_AP_PARAMS = {
-    'profile_name': 'whirlwind',
-    'channel': hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-    'n_capabilities': DEFAULT_N_CAPABILITIES_20_MHZ,
-    'ac_capabilities': None
+    "profile_name": "whirlwind",
+    "channel": hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+    "n_capabilities": DEFAULT_N_CAPABILITIES_20_MHZ,
+    "ac_capabilities": None,
 }
 
 DEFAULT_BW_PERCENTAGE = 1
@@ -47,11 +47,11 @@
 DEFAULT_STREAM_TIME = 10
 
 OPERATORS = {
-    '>': operator.gt,
-    '>=': operator.ge,
-    '<': operator.lt,
-    '<=': operator.le,
-    '==': operator.eq
+    ">": operator.gt,
+    ">=": operator.ge,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
 }
 
 GRAPH_COLOR_LEN = 10
@@ -59,13 +59,15 @@
 GRAPH_DEFAULT_CIRCLE_SIZE = 10
 
 
-def eval_operator(operator_string,
-                  actual_value,
-                  expected_value,
-                  max_bw,
-                  rel_tolerance=0,
-                  abs_tolerance=0,
-                  max_bw_rel_tolerance=0):
+def eval_operator(
+    operator_string,
+    actual_value,
+    expected_value,
+    max_bw,
+    rel_tolerance=0,
+    abs_tolerance=0,
+    max_bw_rel_tolerance=0,
+):
     """
     Determines if an inequality evaluates to True, given relative and absolute
     tolerance.
@@ -93,12 +95,13 @@
         return True
 
     error = abs(actual_value - expected_value)
-    accepted_error = max(expected_value * rel_tolerance, abs_tolerance,
-                         max_bw * max_bw_rel_tolerance)
+    accepted_error = max(
+        expected_value * rel_tolerance, abs_tolerance, max_bw * max_bw_rel_tolerance
+    )
     return error <= accepted_error
 
 
-class WlanWmmTest(WifiBaseTest):
+class WlanWmmTest(base_test.WifiBaseTest):
     """Tests WMM QoS Functionality (Station only)
 
     Testbed Requirements:
@@ -113,19 +116,19 @@
         super().setup_class()
 
         try:
-            self.wmm_test_params = self.user_params['wmm_test_params']
-            self._wmm_transceiver_configs = self.wmm_test_params[
-                'wmm_transceivers']
+            self.wmm_test_params = self.user_params["wmm_test_params"]
+            self._wmm_transceiver_configs = self.wmm_test_params["wmm_transceivers"]
         except KeyError:
-            raise AttributeError('Must provide at least 2 WmmTransceivers in '
-                                 '"wmm_test_params" field of ACTS config.')
+            raise AttributeError(
+                "Must provide at least 2 WmmTransceivers in "
+                '"wmm_test_params" field of ACTS config.'
+            )
 
         if len(self._wmm_transceiver_configs) < 2:
-            raise AttributeError(
-                'At least 2 WmmTransceivers must be provided.')
+            raise AttributeError("At least 2 WmmTransceivers must be provided.")
 
-        self.android_devices = getattr(self, 'android_devices', [])
-        self.fuchsia_devices = getattr(self, 'fuchsia_devices', [])
+        self.android_devices = getattr(self, "android_devices", [])
+        self.fuchsia_devices = getattr(self, "fuchsia_devices", [])
 
         self.wlan_devices = [
             create_wlan_device(device)
@@ -133,44 +136,45 @@
         ]
 
         # Create STAUT transceiver
-        if 'staut' not in self._wmm_transceiver_configs:
+        if "staut" not in self._wmm_transceiver_configs:
             raise AttributeError(
-                'Must provide a WmmTransceiver labeled "staut" with a '
-                'wlan_device.')
+                'Must provide a WmmTransceiver labeled "staut" with a ' "wlan_device."
+            )
         self.staut = wmm_transceiver.create(
-            self._wmm_transceiver_configs['staut'],
-            identifier='staut',
-            wlan_devices=self.wlan_devices)
+            self._wmm_transceiver_configs["staut"],
+            identifier="staut",
+            wlan_devices=self.wlan_devices,
+        )
 
         # Required for automated power cycling
         self.dut = self.staut.wlan_device
 
         # Create AP transceiver
-        if 'access_point' not in self._wmm_transceiver_configs:
+        if "access_point" not in self._wmm_transceiver_configs:
             raise AttributeError(
                 'Must provide a WmmTransceiver labeled "access_point" with a '
-                'access_point.')
+                "access_point."
+            )
         self.access_point_transceiver = wmm_transceiver.create(
-            self._wmm_transceiver_configs['access_point'],
-            identifier='access_point',
-            access_points=self.access_points)
+            self._wmm_transceiver_configs["access_point"],
+            identifier="access_point",
+            access_points=self.access_points,
+        )
 
         self.wmm_transceivers = [self.staut, self.access_point_transceiver]
 
         # Create secondary station transceiver, if present
-        if 'secondary_sta' in self._wmm_transceiver_configs:
+        if "secondary_sta" in self._wmm_transceiver_configs:
             self.secondary_sta = wmm_transceiver.create(
-                self._wmm_transceiver_configs['secondary_sta'],
-                identifier='secondary_sta',
-                wlan_devices=self.wlan_devices)
+                self._wmm_transceiver_configs["secondary_sta"],
+                identifier="secondary_sta",
+                wlan_devices=self.wlan_devices,
+            )
             self.wmm_transceivers.append(self.secondary_sta)
         else:
             self.secondary_sta = None
 
-        self.wmm_transceiver_map = {
-            tc.identifier: tc
-            for tc in self.wmm_transceivers
-        }
+        self.wmm_transceiver_map = {tc.identifier: tc for tc in self.wmm_transceivers}
 
     def setup_test(self):
         for tc in self.wmm_transceivers:
@@ -210,39 +214,39 @@
             String, subnet of the network setup (e.g. '192.168.1.0/24')
         """
         # Defaults for required parameters
-        ap_parameters['force_wmm'] = True
-        if 'ssid' not in ap_parameters:
-            ap_parameters['ssid'] = utils.rand_ascii_str(
-                hostapd_constants.AP_SSID_LENGTH_2G)
+        ap_parameters["force_wmm"] = True
+        if "ssid" not in ap_parameters:
+            ap_parameters["ssid"] = utils.rand_ascii_str(
+                hostapd_constants.AP_SSID_LENGTH_2G
+            )
 
-        if 'profile_name' not in ap_parameters:
-            ap_parameters['profile_name'] = 'whirlwind'
+        if "profile_name" not in ap_parameters:
+            ap_parameters["profile_name"] = "whirlwind"
 
-        if 'channel' not in ap_parameters:
-            ap_parameters['channel'] = 6
+        if "channel" not in ap_parameters:
+            ap_parameters["channel"] = 6
 
-        if 'n_capabilities' not in ap_parameters:
-            ap_parameters['n_capabilities'] = DEFAULT_N_CAPABILITIES_20_MHZ
+        if "n_capabilities" not in ap_parameters:
+            ap_parameters["n_capabilities"] = DEFAULT_N_CAPABILITIES_20_MHZ
 
-        if 'additional_ap_parameters' in ap_parameters:
-            ap_parameters['additional_ap_parameters'].update(wmm_parameters)
+        if "additional_ap_parameters" in ap_parameters:
+            ap_parameters["additional_ap_parameters"].update(wmm_parameters)
         else:
-            ap_parameters['additional_ap_parameters'] = wmm_parameters
+            ap_parameters["additional_ap_parameters"] = wmm_parameters
 
         # Optional security
-        security_config = ap_parameters.get('security_config', None)
+        security_config = ap_parameters.get("security_config", None)
         if security_config:
-            ap_parameters['security'] = hostapd_security.Security(
-                **security_config)
-            ap_parameters.pop('security_config')
+            ap_parameters["security"] = hostapd_security.Security(**security_config)
+            ap_parameters.pop("security_config")
 
         # Start AP with kwargs
-        self.log.info('Setting up WMM network: %s' % ap_parameters['ssid'])
+        self.log.info("Setting up WMM network: %s" % ap_parameters["ssid"])
         setup_ap(self.access_point_transceiver.access_point, **ap_parameters)
-        self.log.info('Network (%s) is up.' % ap_parameters['ssid'])
+        self.log.info("Network (%s) is up." % ap_parameters["ssid"])
 
         # Return subnet
-        if ap_parameters['channel'] < hostapd_constants.LOWEST_5G_CHANNEL:
+        if ap_parameters["channel"] < hostapd_constants.LOWEST_5G_CHANNEL:
             return self.access_point_transceiver.access_point._AP_2G_SUBNET_STR
         else:
             return self.access_point_transceiver.access_point._AP_5G_SUBNET_STR
@@ -256,25 +260,27 @@
         """
         if not wmm_transceiver.wlan_device:
             raise AttributeError(
-                'Cannot associate a WmmTransceiver that does not have a '
-                'WlanDevice.')
-        ssid = ap_params['ssid']
+                "Cannot associate a WmmTransceiver that does not have a " "WlanDevice."
+            )
+        ssid = ap_params["ssid"]
         password = None
         target_security = None
-        security = ap_params.get('security')
+        security = ap_params.get("security")
         if security:
             password = security.password
-            target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security.security_mode_string)
+            target_security = (
+                hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                    security.security_mode_string
+                )
+            )
         associated = wmm_transceiver.wlan_device.associate(
-            target_ssid=ssid,
-            target_pwd=password,
-            target_security=target_security)
+            target_ssid=ssid, target_pwd=password, target_security=target_security
+        )
         if not associated:
-            raise ConnectionError('Failed to associate WmmTransceiver %s.' %
-                                  wmm_transceiver.identifier)
-        self.log.info('WmmTransceiver %s associated.' %
-                      wmm_transceiver.identifier)
+            raise ConnectionError(
+                "Failed to associate WmmTransceiver %s." % wmm_transceiver.identifier
+            )
+        self.log.info("WmmTransceiver %s associated." % wmm_transceiver.identifier)
 
     def validate_streams_in_phase(self, phase_id, phases, max_bw):
         """Validates any stream in a phase that has validation criteria.
@@ -291,106 +297,138 @@
         """
         pass_val = True
         for stream_id, stream in phases[phase_id].items():
-            if 'validation' in stream:
-                transmitter = stream['transmitter']
-                uuid = stream['uuid']
+            if "validation" in stream:
+                transmitter = stream["transmitter"]
+                uuid = stream["uuid"]
                 actual_bw = transmitter.get_results(uuid).avg_rate
                 if not actual_bw:
                     raise ConnectionError(
-                        '(Phase: %s, Stream: %s) - Stream results show '
-                        'bandwidth: None' % (phase_id, stream_id))
-                for check in stream['validation']:
-                    operator_str = check['operator']
-                    rel_tolerance = check.get('rel_tolerance', 0)
-                    abs_tolerance = check.get('abs_tolerance', 0)
-                    max_bw_rel_tolerance = check.get('max_bw_rel_tolerance', 0)
-                    expected_bw_percentage = check.get('bandwidth_percentage',
-                                                       DEFAULT_BW_PERCENTAGE)
+                        "(Phase: %s, Stream: %s) - Stream results show "
+                        "bandwidth: None" % (phase_id, stream_id)
+                    )
+                for check in stream["validation"]:
+                    operator_str = check["operator"]
+                    rel_tolerance = check.get("rel_tolerance", 0)
+                    abs_tolerance = check.get("abs_tolerance", 0)
+                    max_bw_rel_tolerance = check.get("max_bw_rel_tolerance", 0)
+                    expected_bw_percentage = check.get(
+                        "bandwidth_percentage", DEFAULT_BW_PERCENTAGE
+                    )
                     # Explicit Bandwidth Validation
-                    if 'bandwidth' in check:
-                        comp_bw = check['bandwidth']
+                    if "bandwidth" in check:
+                        comp_bw = check["bandwidth"]
                         log_msg = (
-                            'Expected Bandwidth: %s (explicit validation '
-                            'bandwidth [%s] x expected bandwidth '
-                            'percentage [%s])' %
-                            (expected_bw_percentage * comp_bw, comp_bw,
-                             expected_bw_percentage))
+                            "Expected Bandwidth: %s (explicit validation "
+                            "bandwidth [%s] x expected bandwidth "
+                            "percentage [%s])"
+                            % (
+                                expected_bw_percentage * comp_bw,
+                                comp_bw,
+                                expected_bw_percentage,
+                            )
+                        )
 
                     # Stream Comparison Validation
-                    elif 'phase' in check and 'stream' in check:
-                        comp_phase_id = check['phase']
-                        comp_stream_id = check['stream']
+                    elif "phase" in check and "stream" in check:
+                        comp_phase_id = check["phase"]
+                        comp_stream_id = check["stream"]
                         comp_stream = phases[comp_phase_id][comp_stream_id]
-                        comp_transmitter = comp_stream['transmitter']
-                        comp_uuid = comp_stream['uuid']
-                        comp_bw = comp_transmitter.get_results(
-                            comp_uuid).avg_rate
+                        comp_transmitter = comp_stream["transmitter"]
+                        comp_uuid = comp_stream["uuid"]
+                        comp_bw = comp_transmitter.get_results(comp_uuid).avg_rate
                         log_msg = (
-                            'Expected Bandwidth: %s (bandwidth for phase: %s, '
-                            'stream: %s [%s] x expected bandwidth percentage '
-                            '[%s])' %
-                            (expected_bw_percentage * comp_bw, comp_phase_id,
-                             comp_stream_id, comp_bw, expected_bw_percentage))
+                            "Expected Bandwidth: %s (bandwidth for phase: %s, "
+                            "stream: %s [%s] x expected bandwidth percentage "
+                            "[%s])"
+                            % (
+                                expected_bw_percentage * comp_bw,
+                                comp_phase_id,
+                                comp_stream_id,
+                                comp_bw,
+                                expected_bw_percentage,
+                            )
+                        )
 
                     # Expected Bandwidth Validation
                     else:
-                        if 'bandwidth' in stream:
-                            comp_bw = stream['bandwidth']
+                        if "bandwidth" in stream:
+                            comp_bw = stream["bandwidth"]
                             log_msg = (
-                                'Expected Bandwidth: %s (expected stream '
-                                'bandwidth [%s] x expected bandwidth '
-                                'percentage [%s])' %
-                                (expected_bw_percentage * comp_bw, comp_bw,
-                                 expected_bw_percentage))
+                                "Expected Bandwidth: %s (expected stream "
+                                "bandwidth [%s] x expected bandwidth "
+                                "percentage [%s])"
+                                % (
+                                    expected_bw_percentage * comp_bw,
+                                    comp_bw,
+                                    expected_bw_percentage,
+                                )
+                            )
                         else:
                             max_bw_percentage = stream.get(
-                                'max_bandwidth_percentage',
-                                DEFAULT_BW_PERCENTAGE)
+                                "max_bandwidth_percentage", DEFAULT_BW_PERCENTAGE
+                            )
                             comp_bw = max_bw * max_bw_percentage
                             log_msg = (
-                                'Expected Bandwidth: %s (max bandwidth [%s] x '
-                                'stream bandwidth percentage [%s] x expected '
-                                'bandwidth percentage [%s])' %
-                                (expected_bw_percentage * comp_bw, max_bw,
-                                 max_bw_percentage, expected_bw_percentage))
+                                "Expected Bandwidth: %s (max bandwidth [%s] x "
+                                "stream bandwidth percentage [%s] x expected "
+                                "bandwidth percentage [%s])"
+                                % (
+                                    expected_bw_percentage * comp_bw,
+                                    max_bw,
+                                    max_bw_percentage,
+                                    expected_bw_percentage,
+                                )
+                            )
 
                     self.log.info(
-                        'Validation criteria - Stream: %s, '
-                        'Actual Bandwidth: %s, Operator: %s, %s, '
-                        'Relative Tolerance: %s, Absolute Tolerance: %s, Max '
-                        'Bandwidth Relative Tolerance: %s' %
-                        (stream_id, actual_bw, operator_str, log_msg,
-                         rel_tolerance, abs_tolerance, max_bw_rel_tolerance))
+                        "Validation criteria - Stream: %s, "
+                        "Actual Bandwidth: %s, Operator: %s, %s, "
+                        "Relative Tolerance: %s, Absolute Tolerance: %s, Max "
+                        "Bandwidth Relative Tolerance: %s"
+                        % (
+                            stream_id,
+                            actual_bw,
+                            operator_str,
+                            log_msg,
+                            rel_tolerance,
+                            abs_tolerance,
+                            max_bw_rel_tolerance,
+                        )
+                    )
 
                     if eval_operator(
-                            operator_str,
-                            actual_bw,
-                            comp_bw * expected_bw_percentage,
-                            max_bw,
-                            rel_tolerance=rel_tolerance,
-                            abs_tolerance=abs_tolerance,
-                            max_bw_rel_tolerance=max_bw_rel_tolerance):
+                        operator_str,
+                        actual_bw,
+                        comp_bw * expected_bw_percentage,
+                        max_bw,
+                        rel_tolerance=rel_tolerance,
+                        abs_tolerance=abs_tolerance,
+                        max_bw_rel_tolerance=max_bw_rel_tolerance,
+                    ):
                         self.log.info(
-                            '(Phase: %s, Stream: %s) - PASSES validation check!'
-                            % (phase_id, stream_id))
+                            "(Phase: %s, Stream: %s) - PASSES validation check!"
+                            % (phase_id, stream_id)
+                        )
                     else:
                         self.log.info(
-                            '(Phase: %s, Stream: %s) - Stream FAILS validation '
-                            'check.' % (phase_id, stream_id))
+                            "(Phase: %s, Stream: %s) - Stream FAILS validation "
+                            "check." % (phase_id, stream_id)
+                        )
                         pass_val = False
         if pass_val:
             self.log.info(
-                '(Phase %s) - All streams\' validation criteria were met.' %
-                phase_id)
+                "(Phase %s) - All streams' validation criteria were met." % phase_id
+            )
             return True
         else:
             self.log.error(
-                '(Phase %s) - At least one stream validation criterion was not '
-                'met.' % phase_id)
+                "(Phase %s) - At least one stream validation criterion was not "
+                "met." % phase_id
+            )
             return False
 
     def graph_test(self, phases, max_bw):
-        """ Outputs a bokeh html graph of the streams. Saves to ACTS log
+        """Outputs a bokeh html graph of the streams. Saves to ACTS log
         directory.
 
         Args:
@@ -405,14 +443,15 @@
             from bokeh.plotting import ColumnDataSource, figure, output_file, save
             from bokeh.models import Span, Label
         except ImportError as e:
-            self.log.warn("bokeh is not installed: skipping creation of graphs. "
-                          "Note CSV files are still available. If graphs are "
-                          "desired, install antlion with the \"bokeh\" feature.")
+            self.log.warn(
+                "bokeh is not installed: skipping creation of graphs. "
+                "Note CSV files are still available. If graphs are "
+                'desired, install antlion with the "bokeh" feature.'
+            )
             return
 
         output_path = context.get_current_context().get_base_output_path()
-        output_file_name = '%s/WlanWmmTest/%s.html' % (output_path,
-                                                       self.test_name)
+        output_file_name = "%s/WlanWmmTest/%s.html" % (output_path, self.test_name)
         output_file(output_file_name)
 
         start_time = 0
@@ -425,113 +464,127 @@
         for phase_id, phase in phases.items():
             longest_stream_time = 0
             for stream_id, stream in phase.items():
-                transmitter = stream['transmitter']
-                uuid = stream['uuid']
+                transmitter = stream["transmitter"]
+                uuid = stream["uuid"]
 
-                if 'bandwidth' in stream:
-                    stream_bw = "{:.3f}".format(stream['bandwidth'])
-                    stream_bw_formula_str = '%sMb/s' % stream_bw
-                elif 'max_bandwidth_percentage' in stream:
-                    max_bw_percentage = stream['max_bandwidth_percentage']
+                if "bandwidth" in stream:
+                    stream_bw = "{:.3f}".format(stream["bandwidth"])
+                    stream_bw_formula_str = "%sMb/s" % stream_bw
+                elif "max_bandwidth_percentage" in stream:
+                    max_bw_percentage = stream["max_bandwidth_percentage"]
                     stream_bw = "{:.3f}".format(max_bw * max_bw_percentage)
-                    stream_bw_formula_str = '%sMb/s (%s%% of max bandwidth)' % (
-                        stream_bw, str(max_bw_percentage * 100))
+                    stream_bw_formula_str = "%sMb/s (%s%% of max bandwidth)" % (
+                        stream_bw,
+                        str(max_bw_percentage * 100),
+                    )
                 else:
                     raise AttributeError(
-                        'Stream %s must have either a bandwidth or '
-                        'max_bandwidth_percentage parameter.' % stream_id)
+                        "Stream %s must have either a bandwidth or "
+                        "max_bandwidth_percentage parameter." % stream_id
+                    )
 
-                stream_time = stream.get('time', DEFAULT_STREAM_TIME)
+                stream_time = stream.get("time", DEFAULT_STREAM_TIME)
                 longest_stream_time = max(longest_stream_time, stream_time)
 
                 avg_rate = transmitter.get_results(uuid).avg_rate
 
-                instantaneous_rates = transmitter.get_results(
-                    uuid).instantaneous_rates
-                highest_stream_bw = max(highest_stream_bw,
-                                        max(instantaneous_rates))
-                lowest_stream_bw = min(lowest_stream_bw,
-                                       min(instantaneous_rates))
+                instantaneous_rates = transmitter.get_results(uuid).instantaneous_rates
+                highest_stream_bw = max(highest_stream_bw, max(instantaneous_rates))
+                lowest_stream_bw = min(lowest_stream_bw, min(instantaneous_rates))
 
                 stream_data = ColumnDataSource(
-                    dict(time=[
-                        x for x in range(start_time, start_time + stream_time)
-                    ],
+                    dict(
+                        time=[x for x in range(start_time, start_time + stream_time)],
                         instantaneous_bws=instantaneous_rates,
                         avg_bw=[avg_rate for _ in range(stream_time)],
                         stream_id=[stream_id for _ in range(stream_time)],
                         attempted_bw=[
-                        stream_bw_formula_str for _ in range(stream_time)
-                    ]))
+                            stream_bw_formula_str for _ in range(stream_time)
+                        ],
+                    )
+                )
                 line = {
-                    'x_axis': 'time',
-                    'y_axis': 'instantaneous_bws',
-                    'source': stream_data,
-                    'line_width': GRAPH_DEFAULT_LINE_WIDTH,
-                    'legend_label': '%s:%s' % (phase_id, stream_id)
+                    "x_axis": "time",
+                    "y_axis": "instantaneous_bws",
+                    "source": stream_data,
+                    "line_width": GRAPH_DEFAULT_LINE_WIDTH,
+                    "legend_label": "%s:%s" % (phase_id, stream_id),
                 }
                 graph_lines.append(line)
 
             start_time = start_time + longest_stream_time
-        TOOLTIPS = [('Time', '@time'),
-                    ('Attempted Bandwidth', '@attempted_bw'),
-                    ('Instantaneous Bandwidth', '@instantaneous_bws'),
-                    ('Stream Average Bandwidth', '@avg_bw'),
-                    ('Stream', '@stream_id')]
+        TOOLTIPS = [
+            ("Time", "@time"),
+            ("Attempted Bandwidth", "@attempted_bw"),
+            ("Instantaneous Bandwidth", "@instantaneous_bws"),
+            ("Stream Average Bandwidth", "@avg_bw"),
+            ("Stream", "@stream_id"),
+        ]
 
         # Create and scale graph appropriately
         time_vs_bandwidth_graph = figure(
-            title=('Bandwidth for %s' % self.test_name),
-            x_axis_label='Time',
-            y_axis_label='Bandwidth',
+            title=("Bandwidth for %s" % self.test_name),
+            x_axis_label="Time",
+            y_axis_label="Bandwidth",
             tooltips=TOOLTIPS,
-            y_range=(lowest_stream_bw -
-                     (0.5 * (highest_stream_bw - lowest_stream_bw)),
-                     1.05 * max_bw))
-        time_vs_bandwidth_graph.sizing_mode = 'stretch_both'
-        time_vs_bandwidth_graph.title.align = 'center'
+            y_range=(
+                lowest_stream_bw - (0.5 * (highest_stream_bw - lowest_stream_bw)),
+                1.05 * max_bw,
+            ),
+        )
+        time_vs_bandwidth_graph.sizing_mode = "stretch_both"
+        time_vs_bandwidth_graph.title.align = "center"
         colors = Category10[GRAPH_COLOR_LEN]
         color_ind = 0
 
         # Draw max bandwidth line
-        max_bw_span = Span(location=max_bw,
-                           dimension='width',
-                           line_color='black',
-                           line_dash='dashed',
-                           line_width=GRAPH_DEFAULT_LINE_WIDTH)
-        max_bw_label = Label(x=(0.5 * start_time),
-                             y=max_bw,
-                             text=('Max Bandwidth: %sMb/s' % max_bw),
-                             text_align='center')
+        max_bw_span = Span(
+            location=max_bw,
+            dimension="width",
+            line_color="black",
+            line_dash="dashed",
+            line_width=GRAPH_DEFAULT_LINE_WIDTH,
+        )
+        max_bw_label = Label(
+            x=(0.5 * start_time),
+            y=max_bw,
+            text=("Max Bandwidth: %sMb/s" % max_bw),
+            text_align="center",
+        )
         time_vs_bandwidth_graph.add_layout(max_bw_span)
         time_vs_bandwidth_graph.add_layout(max_bw_label)
 
         # Draw stream lines
         for line in graph_lines:
-            time_vs_bandwidth_graph.line(line['x_axis'],
-                                         line['y_axis'],
-                                         source=line['source'],
-                                         line_width=line['line_width'],
-                                         legend_label=line['legend_label'],
-                                         color=colors[color_ind])
-            time_vs_bandwidth_graph.circle(line['x_axis'],
-                                           line['y_axis'],
-                                           source=line['source'],
-                                           size=GRAPH_DEFAULT_CIRCLE_SIZE,
-                                           legend_label=line['legend_label'],
-                                           color=colors[color_ind])
+            time_vs_bandwidth_graph.line(
+                line["x_axis"],
+                line["y_axis"],
+                source=line["source"],
+                line_width=line["line_width"],
+                legend_label=line["legend_label"],
+                color=colors[color_ind],
+            )
+            time_vs_bandwidth_graph.circle(
+                line["x_axis"],
+                line["y_axis"],
+                source=line["source"],
+                size=GRAPH_DEFAULT_CIRCLE_SIZE,
+                legend_label=line["legend_label"],
+                color=colors[color_ind],
+            )
             color_ind = (color_ind + 1) % GRAPH_COLOR_LEN
         time_vs_bandwidth_graph.legend.location = "top_left"
         time_vs_bandwidth_graph.legend.click_policy = "hide"
         graph_file = save([time_vs_bandwidth_graph])
-        self.log.info('Saved graph to %s' % graph_file)
+        self.log.info("Saved graph to %s" % graph_file)
 
-    def run_wmm_test(self,
-                     phases,
-                     ap_parameters=DEFAULT_AP_PARAMS,
-                     wmm_parameters=hostapd_constants.
-                     WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                     stream_timeout=DEFAULT_STREAM_TIMEOUT):
+    def run_wmm_test(
+        self,
+        phases,
+        ap_parameters=DEFAULT_AP_PARAMS,
+        wmm_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        stream_timeout=DEFAULT_STREAM_TIMEOUT,
+    ):
         """Runs a WMM test case.
 
         Args:
@@ -548,20 +601,18 @@
             FAIL, otherwise
         """
         # Setup AP
-        subnet_str = self.start_ap_with_wmm_params(ap_parameters,
-                                                   wmm_parameters)
+        subnet_str = self.start_ap_with_wmm_params(ap_parameters, wmm_parameters)
         # Determine transmitters and receivers used in test case
         transmitters = set()
         receivers = set()
         for phase in phases.values():
             for stream in phase.values():
-                transmitter = self.wmm_transceiver_map[
-                    stream['transmitter_str']]
+                transmitter = self.wmm_transceiver_map[stream["transmitter_str"]]
                 transmitters.add(transmitter)
-                stream['transmitter'] = transmitter
-                receiver = self.wmm_transceiver_map[stream['receiver_str']]
+                stream["transmitter"] = transmitter
+                receiver = self.wmm_transceiver_map[stream["receiver_str"]]
                 receivers.add(receiver)
-                stream['receiver'] = receiver
+                stream["receiver"] = receiver
         transceivers = transmitters.union(receivers)
 
         # Associate all transceivers with wlan_devices
@@ -570,45 +621,47 @@
                 self.associate_transceiver(tc, ap_parameters)
 
         # Determine link max bandwidth
-        self.log.info('Determining link maximum bandwidth.')
+        self.log.info("Determining link maximum bandwidth.")
         uuid = self.staut.run_synchronous_traffic_stream(
-            {'receiver': self.access_point_transceiver}, subnet_str)
+            {"receiver": self.access_point_transceiver}, subnet_str
+        )
         max_bw = self.staut.get_results(uuid).avg_send_rate
-        self.log.info('Link maximum bandwidth: %s Mb/s' % max_bw)
+        self.log.info("Link maximum bandwidth: %s Mb/s" % max_bw)
 
         # Run parallel phases
         pass_test = True
         for phase_id, phase in phases.items():
-            self.log.info('Setting up phase: %s' % phase_id)
+            self.log.info("Setting up phase: %s" % phase_id)
 
             for stream_id, stream in phase.items():
-
-                transmitter = stream['transmitter']
-                receiver = stream['receiver']
-                access_category = stream.get('access_category', None)
-                stream_time = stream.get('time', DEFAULT_STREAM_TIME)
+                transmitter = stream["transmitter"]
+                receiver = stream["receiver"]
+                access_category = stream.get("access_category", None)
+                stream_time = stream.get("time", DEFAULT_STREAM_TIME)
 
                 # Determine stream type
-                if 'bandwidth' in stream:
-                    bw = stream['bandwidth']
-                elif 'max_bandwidth_percentage' in stream:
-                    max_bw_percentage = stream['max_bandwidth_percentage']
+                if "bandwidth" in stream:
+                    bw = stream["bandwidth"]
+                elif "max_bandwidth_percentage" in stream:
+                    max_bw_percentage = stream["max_bandwidth_percentage"]
                     bw = max_bw * max_bw_percentage
                 else:
                     raise AttributeError(
-                        'Stream %s must have either a bandwidth or '
-                        'max_bandwidth_percentage parameter.' % stream_id)
+                        "Stream %s must have either a bandwidth or "
+                        "max_bandwidth_percentage parameter." % stream_id
+                    )
 
                 stream_params = {
-                    'receiver': receiver,
-                    'access_category': access_category,
-                    'bandwidth': bw,
-                    'time': stream_time
+                    "receiver": receiver,
+                    "access_category": access_category,
+                    "bandwidth": bw,
+                    "time": stream_time,
                 }
 
                 uuid = transmitter.prepare_asynchronous_stream(
-                    stream_params, subnet_str)
-                stream['uuid'] = uuid
+                    stream_params, subnet_str
+                )
+                stream["uuid"] = uuid
 
             # Start all streams in phase
             start_time = time.time() + 5
@@ -621,8 +674,9 @@
                 while transmitter.has_active_streams:
                     if time.time() > end_time:
                         raise ConnectionError(
-                            'Transmitter\'s (%s) active streams are not finishing.'
-                            % transmitter.identifier)
+                            "Transmitter's (%s) active streams are not finishing."
+                            % transmitter.identifier
+                        )
                     time.sleep(1)
 
             # Cleanup all streams
@@ -631,19 +685,20 @@
 
             # Validate streams
             pass_test = pass_test and self.validate_streams_in_phase(
-                phase_id, phases, max_bw)
+                phase_id, phases, max_bw
+            )
 
         self.graph_test(phases, max_bw)
         if pass_test:
             asserts.explicit_pass(
-                'Validation criteria met for all streams in all phases.')
+                "Validation criteria met for all streams in all phases."
+            )
         else:
-            asserts.fail(
-                'At least one stream failed to meet validation criteria.')
+            asserts.fail("At least one stream failed to meet validation criteria.")
 
-# Test Cases
+    # Test Cases
 
-# Internal Traffic Differentiation
+    # Internal Traffic Differentiation
 
     def test_internal_traffic_diff_VO_VI(self):
         self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_VI)
@@ -663,186 +718,181 @@
     def test_internal_traffic_diff_BE_BK(self):
         self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_BE_BK)
 
-# External Traffic Differentiation
+    # External Traffic Differentiation
 
     """Single station, STAUT transmits high priority"""
 
     def test_external_traffic_diff_staut_VO_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VO_ap_VI)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_VI)
 
     def test_external_traffic_diff_staut_VO_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BE)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BE)
 
     def test_external_traffic_diff_staut_VO_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BK)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BK)
 
     def test_external_traffic_diff_staut_VI_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BE)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BE)
 
     def test_external_traffic_diff_staut_VI_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BK)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BK)
 
     def test_external_traffic_diff_staut_BE_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BE_ap_BK)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_BK)
 
     """Single station, STAUT transmits low priority"""
 
     def test_external_traffic_diff_staut_VI_ap_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VI_ap_VO)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_VO)
 
     def test_external_traffic_diff_staut_BE_ap_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VO)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VO)
 
     def test_external_traffic_diff_staut_BK_ap_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VO)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VO)
 
     def test_external_traffic_diff_staut_BE_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VI)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VI)
 
     def test_external_traffic_diff_staut_BK_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VI)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VI)
 
     def test_external_traffic_diff_staut_BK_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BK_ap_BE)
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_BE)
 
-# # Dual Internal/External Traffic Differentiation (Single station)
+    # Dual Internal/External Traffic Differentiation (Single station)
 
     def test_dual_traffic_diff_staut_VO_VI_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VO_VI_ap_VI)
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_VI_ap_VI)
 
     def test_dual_traffic_diff_staut_VO_BE_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VO_BE_ap_BE)
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_BE_ap_BE)
 
     def test_dual_traffic_diff_staut_VO_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VO_BK_ap_BK)
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_BK_ap_BK)
 
     def test_dual_traffic_diff_staut_VI_BE_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VI_BE_ap_BE)
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VI_BE_ap_BE)
 
     def test_dual_traffic_diff_staut_VI_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VI_BK_ap_BK)
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VI_BK_ap_BK)
 
     def test_dual_traffic_diff_staut_BE_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_BE_BK_ap_BK)
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_BE_BK_ap_BK)
 
-# ACM Bit Conformance Tests (Single station, as WFA test below uses two)
+    # ACM Bit Conformance Tests (Single station, as WFA test below uses two)
 
     def test_acm_bit_on_VI(self):
         wmm_params_VI_ACM = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        self.run_wmm_test(wmm_test_cases.test_acm_bit_on_VI,
-                          wmm_parameters=wmm_params_VI_ACM)
+            hostapd_constants.WMM_ACM_VI,
+        )
+        self.run_wmm_test(
+            wmm_test_cases.test_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM
+        )
 
-# AC Parameter Modificiation Tests (Single station, as WFA test below uses two)
+    # AC Parameter Modification Tests (Single station, as WFA test below uses two)
 
     def test_ac_param_degrade_VO(self):
         self.run_wmm_test(
             wmm_test_cases.test_ac_param_degrade_VO,
-            wmm_parameters=hostapd_constants.WMM_DEGRADED_VO_PARAMS)
+            wmm_parameters=hostapd_constants.WMM_DEGRADED_VO_PARAMS,
+        )
 
     def test_ac_param_degrade_VI(self):
         self.run_wmm_test(
             wmm_test_cases.test_ac_param_degrade_VI,
-            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS)
+            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS,
+        )
 
     def test_ac_param_improve_BE(self):
         self.run_wmm_test(
             wmm_test_cases.test_ac_param_improve_BE,
-            wmm_parameters=hostapd_constants.WMM_IMPROVE_BE_PARAMS)
+            wmm_parameters=hostapd_constants.WMM_IMPROVE_BE_PARAMS,
+        )
 
     def test_ac_param_improve_BK(self):
         self.run_wmm_test(
             wmm_test_cases.test_ac_param_improve_BK,
-            wmm_parameters=hostapd_constants.WMM_IMPROVE_BK_PARAMS)
+            wmm_parameters=hostapd_constants.WMM_IMPROVE_BK_PARAMS,
+        )
 
-
-# WFA Test Plan Tests
+    # WFA Test Plan Tests
 
     """Traffic Differentiation in Single BSS (Single Station)"""
 
     def test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE(self):
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE)
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE
+        )
 
     def test_wfa_traffic_diff_single_station_staut_VI_BE(self):
         self.run_wmm_test(
-            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE)
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE
+        )
 
     def test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE(self):
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE)
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE
+        )
 
     def test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK(self):
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK)
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK
+        )
 
     def test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI(self):
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI)
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI
+        )
 
     """Traffic Differentiation in Single BSS (Two Stations)"""
 
     def test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE)
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE
+        )
 
     def test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE)
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE
+        )
 
     def test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK)
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK
+        )
 
     def test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
         self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI)
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI
+        )
 
     """Test ACM Bit Conformance (Two Stations)"""
 
     def test_wfa_acm_bit_on_VI(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
         wmm_params_VI_ACM = utils.merge_dicts(
             hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        self.run_wmm_test(wmm_test_cases.test_wfa_acm_bit_on_VI,
-                          wmm_parameters=wmm_params_VI_ACM)
+            hostapd_constants.WMM_ACM_VI,
+        )
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM
+        )
 
     """Test the AC Parameter Modification"""
 
     def test_wfa_ac_param_degrade_VI(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
         self.run_wmm_test(
             wmm_test_cases.test_wfa_ac_param_degrade_VI,
-            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS)
+            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS,
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml b/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml
new file mode 100644
index 0000000..b70490b
--- /dev/null
+++ b/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml
@@ -0,0 +1,5408 @@
+channel_sweep_test_params:
+  skip_performance: false
+  debug_channel_performance_tests:
+    - test_name: test_random_2g_20mhz_channel
+      test_channels:
+        - 8
+      test_channel_bandwidth: 20
+    - test_name: test_random_dfs_5g_80mhz_channel
+      test_channels:
+        - 100
+      test_channel_bandwidth: 80
+    - test_name: test_random_nondfs_5g_80mhz_channel
+      test_channels:
+        - 157
+      test_channel_bandwidth: 80
+    - test_name: test_channel_165
+      test_channels:
+        - 165
+      test_channel_bandwidth: 20
+  regulatory_compliance_tests:
+    - test_name: test_SE_regulatory_compliance
+      country_code: SE
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_US_regulatory_compliance
+      country_code: US
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_MX_regulatory_compliance
+      country_code: MX
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_CA_regulatory_compliance
+      country_code: CA
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_IN_regulatory_compliance
+      country_code: IN
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_NZ_regulatory_compliance
+      country_code: NZ
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_GB_regulatory_compliance
+      country_code: GB
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_AU_regulatory_compliance
+      country_code: AU
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_JP_regulatory_compliance
+      country_code: JP
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+    - test_name: test_FR_regulatory_compliance
+      country_code: FR
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_DE_regulatory_compliance
+      country_code: DE
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_CH_regulatory_compliance
+      country_code: CH
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_BE_regulatory_compliance
+      country_code: BE
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_IE_regulatory_compliance
+      country_code: IE
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_NO_regulatory_compliance
+      country_code: "NO"
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_ES_regulatory_compliance
+      country_code: ES
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_IT_regulatory_compliance
+      country_code: IT
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_NL_regulatory_compliance
+      country_code: NL
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_DK_regulatory_compliance
+      country_code: DK
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+    - test_name: test_SG_regulatory_compliance
+      country_code: SG
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_WW_regulatory_compliance
+      country_code: WW
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+    - test_name: test_TW_regulatory_compliance
+      country_code: TW
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+    - test_name: test_AT_regulatory_compliance
+      country_code: AT
+      test_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "14":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
+        "144":
+          - 20
+          - 40
+          - 80
+        "149":
+          - 20
+          - 40
+          - 80
+        "153":
+          - 20
+          - 40
+          - 80
+        "157":
+          - 20
+          - 40
+          - 80
+        "161":
+          - 20
+          - 40
+          - 80
+        "165":
+          - 20
+      allowed_channels:
+        "1":
+          - 20
+        "2":
+          - 20
+        "3":
+          - 20
+        "4":
+          - 20
+        "5":
+          - 20
+        "6":
+          - 20
+        "7":
+          - 20
+        "8":
+          - 20
+        "9":
+          - 20
+        "10":
+          - 20
+        "11":
+          - 20
+        "12":
+          - 20
+        "13":
+          - 20
+        "36":
+          - 20
+          - 40
+          - 80
+        "40":
+          - 20
+          - 40
+          - 80
+        "44":
+          - 20
+          - 40
+          - 80
+        "48":
+          - 20
+          - 40
+          - 80
+        "52":
+          - 20
+          - 40
+          - 80
+        "56":
+          - 20
+          - 40
+          - 80
+        "60":
+          - 20
+          - 40
+          - 80
+        "64":
+          - 20
+          - 40
+          - 80
+        "100":
+          - 20
+          - 40
+          - 80
+        "104":
+          - 20
+          - 40
+          - 80
+        "108":
+          - 20
+          - 40
+          - 80
+        "112":
+          - 20
+          - 40
+          - 80
+        "116":
+          - 20
+          - 40
+          - 80
+        "120":
+          - 20
+          - 40
+          - 80
+        "124":
+          - 20
+          - 40
+          - 80
+        "128":
+          - 20
+          - 40
+          - 80
+        "132":
+          - 20
+          - 40
+          - 80
+        "136":
+          - 20
+          - 40
+          - 80
+        "140":
+          - 20
+          - 40
+          - 80
diff --git a/src/antlion/tests/wlan/performance/rvr_settings.yaml b/src/antlion/tests/wlan/performance/rvr_settings.yaml
new file mode 100644
index 0000000..620a48d
--- /dev/null
+++ b/src/antlion/tests/wlan/performance/rvr_settings.yaml
@@ -0,0 +1,7 @@
+rvr_settings:
+  starting_attn: 10
+  ending_attn: 61
+  step_size_in_db: 3
+  dwell_time_in_secs: 10
+  reverse_rvr_after_forward: true
+  iperf_flags: "-i 1"
diff --git a/src/antlion/tests/wlan_policy/BUILD.gn b/src/antlion/tests/wlan_policy/BUILD.gn
new file mode 100644
index 0000000..58dcfb6
--- /dev/null
+++ b/src/antlion/tests/wlan_policy/BUILD.gn
@@ -0,0 +1,51 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("hidden_networks_test") {
+  main_source = "HiddenNetworksTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("policy_scan_test") {
+  main_source = "PolicyScanTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("regulatory_recovery_test") {
+  main_source = "RegulatoryRecoveryTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("saved_networks_test") {
+  main_source = "SavedNetworksTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("start_stop_client_connections_test") {
+  main_source = "StartStopClientConnectionsTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":hidden_networks_test($host_toolchain)",
+    ":policy_scan_test($host_toolchain)",
+    ":regulatory_recovery_test($host_toolchain)",
+    ":saved_networks_test($host_toolchain)",
+    ":start_stop_client_connections_test($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":hidden_networks_test($host_toolchain)",
+  ]
+}
diff --git a/src/antlion/tests/wlan_policy/HiddenNetworksTest.py b/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
index aa2c7ba..2928304 100644
--- a/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
+++ b/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
@@ -16,13 +16,13 @@
 
 import time
 
-from antlion import signals
 from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
+from antlion.test_utils.wifi import base_test
 from antlion.utils import rand_ascii_str
 
+from mobly import signals, test_runner
+
 # These tests should have a longer timeout for connecting than normal connect
 # tests because the device should probabilistically perform active scans for
 # hidden networks. Multiple scans are necessary to verify a very low chance of
@@ -36,8 +36,8 @@
 WPA2 = "wpa2"
 
 
-class HiddenNetworksTest(WifiBaseTest):
-    """ Tests that WLAN Policy will detect hidden networks
+class HiddenNetworksTest(base_test.WifiBaseTest):
+    """Tests that WLAN Policy will detect hidden networks
 
     Test Bed Requirement:
     * One or more Fuchsia devices
@@ -49,31 +49,33 @@
         # Start an AP with a hidden network
         self.hidden_ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         self.access_point = self.access_points[0]
-        self.hidden_password = rand_ascii_str(
-            hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
+        self.hidden_password = rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
         self.hidden_security = WPA2
         security = hostapd_security.Security(
-            security_mode=self.hidden_security, password=self.hidden_password)
+            security_mode=self.hidden_security, password=self.hidden_password
+        )
 
         self.access_point.stop_all_aps()
-        setup_ap(self.access_point,
-                 'whirlwind',
-                 hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 self.hidden_ssid,
-                 hidden=True,
-                 security=security)
+        setup_ap(
+            self.access_point,
+            "whirlwind",
+            hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            self.hidden_ssid,
+            hidden=True,
+            security=security,
+        )
 
         if len(self.fuchsia_devices) < 1:
             raise EnvironmentError("No Fuchsia devices found.")
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
 
     def setup_test(self):
         for fd in self.fuchsia_devices:
             if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
+                raise EnvironmentError("Failed to remove all networks in setup")
 
     def teardown_class(self):
         self.access_point.stop_all_aps()
@@ -88,9 +90,8 @@
             # which would interfere with requested scans.
             fd.wlan_policy_controller.stop_client_connections()
             if not fd.wlan_policy_controller.save_network(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    password=self.hidden_password):
+                self.hidden_ssid, self.hidden_security, password=self.hidden_password
+            ):
                 raise EnvironmentError("Failed to save network")
             fd.wlan_policy_controller.start_client_connections()
             start_time = time.time()
@@ -100,26 +101,28 @@
                 num_performed_scans = num_performed_scans + 1
                 scan_result = fd.sl4f.wlan_policy_lib.wlanScanForNetworks()
                 if scan_result["error"] != None:
-                    self.log.warn("Failed to scan for networks with error %s" %
-                                  scan_result["error"])
+                    self.log.warn(
+                        "Failed to scan for networks with error %s"
+                        % scan_result["error"]
+                    )
                     continue
                 else:
                     scans = scan_result["result"]
                 if self.hidden_ssid in scans:
                     self.log.info(
-                        "SSID of hidden network seen after %d scans" %
-                        num_performed_scans)
+                        "SSID of hidden network seen after %d scans"
+                        % num_performed_scans
+                    )
                     return
                 # Don't overload SL4F with scan requests
                 time.sleep(1)
 
-            self.log.error("Failed to see SSID after %d scans" %
-                           num_performed_scans)
+            self.log.error("Failed to see SSID after %d scans" % num_performed_scans)
             raise signals.TestFailure("Failed to see hidden network in scans")
 
     def test_auto_connect_hidden_on_startup(self):
-        """ Test that if we are not connected to anything but have a hidden
-            network saved, we will eventually actively scan for it and connect."""
+        """Test that if we are not connected to anything but have a hidden
+        network saved, we will eventually actively scan for it and connect."""
         # Start up AP with an open network with a random SSID
 
         for fd in self.fuchsia_devices:
@@ -129,23 +132,21 @@
 
             # Save the network.
             if not fd.wlan_policy_controller.save_network(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    password=self.hidden_password):
+                self.hidden_ssid, self.hidden_security, password=self.hidden_password
+            ):
                 raise EnvironmentError("Failed to save network")
 
             # Reboot the device and check that it auto connects.
             fd.reboot()
             if not fd.wlan_policy_controller.wait_for_connect(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    timeout=TIME_WAIT_FOR_CONNECT):
+                self.hidden_ssid, self.hidden_security, timeout=TIME_WAIT_FOR_CONNECT
+            ):
                 raise signals.TestFailure("Failed to connect to network")
 
     def test_auto_connect_hidden_on_save(self):
-        """ Test that if we save a hidden network and are not connected to
-            anything, the device will connect to the hidden network that was
-            just saved. """
+        """Test that if we save a hidden network and are not connected to
+        anything, the device will connect to the hidden network that was
+        just saved."""
         for fd in self.fuchsia_devices:
             if not fd.wlan_policy_controller.wait_for_no_connections():
                 self.log.info(
@@ -155,13 +156,15 @@
 
             # Save the network and make sure that we see the device auto connect to it.
             if not fd.wlan_policy_controller.save_network(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    password=self.hidden_password):
+                self.hidden_ssid, self.hidden_security, password=self.hidden_password
+            ):
                 raise EnvironmentError("Failed to save network")
 
             if not fd.wlan_policy_controller.wait_for_connect(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    timeout=TIME_WAIT_FOR_CONNECT):
+                self.hidden_ssid, self.hidden_security, timeout=TIME_WAIT_FOR_CONNECT
+            ):
                 raise signals.TestFailure("Failed to connect to network")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/PolicyScanTest.py b/src/antlion/tests/wlan_policy/PolicyScanTest.py
index ca05e6a..6df3291 100644
--- a/src/antlion/tests/wlan_policy/PolicyScanTest.py
+++ b/src/antlion/tests/wlan_policy/PolicyScanTest.py
@@ -18,13 +18,18 @@
 
 from datetime import datetime
 
-from antlion import signals
-from antlion.controllers.ap_lib import (hostapd_ap_preset, hostapd_bss_settings,
-                                     hostapd_constants, hostapd_security)
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.controllers.ap_lib import (
+    hostapd_ap_preset,
+    hostapd_bss_settings,
+    hostapd_constants,
+    hostapd_security,
+)
+from antlion.test_utils.wifi import base_test
+
+from mobly import signals, test_runner
 
 
-class PolicyScanTest(WifiBaseTest):
+class PolicyScanTest(base_test.WifiBaseTest):
     """WLAN policy scan test class.
 
     Test Bed Requirement:
@@ -37,8 +42,9 @@
         if len(self.fuchsia_devices) < 1:
             raise signals.TestFailure("No fuchsia devices found.")
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
         if len(self.access_points) < 1:
             raise signals.TestFailure("No access points found.")
         # Prepare the AP
@@ -59,26 +65,34 @@
                 ssid=self.wpa2_network_2g["SSID"],
                 security=hostapd_security.Security(
                     security_mode=self.wpa2_network_2g["security"],
-                    password=self.wpa2_network_2g["password"])))
+                    password=self.wpa2_network_2g["password"],
+                ),
+            )
+        )
         bss_settings_5g.append(
             hostapd_bss_settings.BssSettings(
                 name=self.wpa2_network_5g["SSID"],
                 ssid=self.wpa2_network_5g["SSID"],
                 security=hostapd_security.Security(
                     security_mode=self.wpa2_network_5g["security"],
-                    password=self.wpa2_network_5g["password"])))
+                    password=self.wpa2_network_5g["password"],
+                ),
+            )
+        )
         self.ap_2g = hostapd_ap_preset.create_ap_preset(
             iface_wlan_2g=self.access_points[0].wlan_2g,
             iface_wlan_5g=self.access_points[0].wlan_5g,
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.open_network_2g["SSID"],
-            bss_settings=bss_settings_2g)
+            bss_settings=bss_settings_2g,
+        )
         self.ap_5g = hostapd_ap_preset.create_ap_preset(
             iface_wlan_2g=self.access_points[0].wlan_2g,
             iface_wlan_5g=self.access_points[0].wlan_5g,
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.open_network_5g["SSID"],
-            bss_settings=bss_settings_5g)
+            bss_settings=bss_settings_5g,
+        )
         # Start the networks
         self.access_point.start_ap(hostapd_config=self.ap_2g)
         self.access_point.start_ap(hostapd_config=self.ap_5g)
@@ -93,7 +107,8 @@
     def setup_test(self):
         for fd in self.fuchsia_devices:
             # stub for setting up all the fuchsia devices in the testbed.
-            return fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
+            return (
+                fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
             )
 
     def teardown_test(self):
@@ -121,7 +136,7 @@
     """Helper Functions"""
 
     def perform_scan(self, fd):
-        """ Initiates scan on a Fuchsia device and returns results
+        """Initiates scan on a Fuchsia device and returns results
 
         Args:
             fd: A fuchsia device
@@ -140,8 +155,10 @@
         # first check if we received an error
         if scan_response.get("error") is not None:
             # the response indicates an error - log and raise failure
-            raise signals.TestFailure("Aborting test - scan failed with "
-                                      "error: %s" % scan_response.get("error"))
+            raise signals.TestFailure(
+                "Aborting test - scan failed with "
+                "error: %s" % scan_response.get("error")
+            )
 
         # the scan command did not get an error response - go ahead
         # and check for scan results
@@ -154,7 +171,7 @@
         return scan_results
 
     def connect_to_network(self, wlan_network_params, fd):
-        """ Connects the Fuchsia device to the specified network
+        """Connects the Fuchsia device to the specified network
 
         Args:
             wlan_network_params: A dictionary containing wlan information.
@@ -169,13 +186,14 @@
 
         # TODO(mnck): use the Policy version of this call, when it is available.
         connection_response = fd.wlan_policy_controller.save_and_connect(
-            target_ssid, target_security, password=target_pwd)
+            target_ssid, target_security, password=target_pwd
+        )
         if not connection_response:
             raise signals.TestFailure("Aborting test - Connect call failed")
         self.log.info("Network connection successful.")
 
     def assert_network_is_in_results(self, scan_results, *, ssid):
-        """ Verified scan results contain a specified network
+        """Verified scan results contain a specified network
 
         Args:
             scan_results: Scan results from a fuchsia Policy API scan
@@ -187,8 +205,8 @@
         """
         if ssid not in scan_results:
             raise signals.TestFailure(
-                'Network "%s" was not found in scan results: %s', ssid,
-                scan_results)
+                'Network "%s" was not found in scan results: %s', ssid, scan_results
+            )
 
     """Tests"""
 
@@ -197,8 +215,7 @@
         for fd in self.fuchsia_devices:
             scan_results = self.perform_scan(fd)
             if len(scan_results) == 0:
-                raise signals.TestFailure("Scan failed or did not "
-                                          "find any networks")
+                raise signals.TestFailure("Scan failed or did not " "find any networks")
             for ssid in self.all_ssids:
                 self.assert_network_is_in_results(scan_results, ssid=ssid)
 
@@ -233,3 +250,7 @@
             scan_results = self.perform_scan(fd)
             for ssid in self.all_ssids:
                 self.assert_network_is_in_results(scan_results, ssid=ssid)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py b/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
index 3e308bb..00c7a84 100644
--- a/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
+++ b/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
@@ -14,11 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import signals
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.test_utils.wifi import base_test
+
+from mobly import signals, test_runner
 
 
-class RegulatoryRecoveryTest(WifiBaseTest):
+class RegulatoryRecoveryTest(base_test.WifiBaseTest):
     """Tests the policy layer's response to setting country code.
 
     Test Bed Requirements:
@@ -39,13 +40,13 @@
             raise EnvironmentError("No Fuchsia devices found.")
 
         self.config_test_params = self.user_params.get(
-            "regulatory_recovery_test_params", {})
+            "regulatory_recovery_test_params", {}
+        )
         self.country_code = self.config_test_params.get("country_code", "US")
-        self.negative_test = self.config_test_params.get(
-            "negative_test", False)
+        self.negative_test = self.config_test_params.get("negative_test", False)
 
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy')
+            fd.configure_wlan(association_mechanism="policy")
 
     def teardown_class(self):
         if not self.negative_test:
@@ -60,8 +61,7 @@
             # connecting to a network, they merely verify whether or not client
             # mode is functional.
             if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
+                raise EnvironmentError("Failed to remove all networks in setup")
 
             # To ensure that DUTs are initially in a known state, set all of
             # their PHYs to world-wide mode.  Also disable client and AP
@@ -86,8 +86,7 @@
             # The negative test case should have failed to set the country
             # code and the positive test case should succeed.
             if self.negative_test:
-                raise EnvironmentError(
-                    "Setting invalid country code succeeded.")
+                raise EnvironmentError("Setting invalid country code succeeded.")
             else:
                 pass
 
@@ -108,22 +107,26 @@
             client_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
             if client_state["error"]:
                 raise signals.TestFailure(
-                    "error querying client state: {}".format(
-                        client_state["error"]))
+                    "error querying client state: {}".format(client_state["error"])
+                )
             elif client_state["result"]["state"] != "ConnectionsDisabled":
                 raise signals.TestFailure(
                     "client connections in unexpected state: {}".format(
-                        client_state["result"]["state"]))
+                        client_state["result"]["state"]
+                    )
+                )
 
             ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
             if ap_state["error"]:
-                raise signals.TestFailure("error querying AP state: {}".format(
-                    ap_state["error"]))
+                raise signals.TestFailure(
+                    "error querying AP state: {}".format(ap_state["error"])
+                )
 
             ap_updates = ap_state["result"]
             if ap_updates:
                 raise signals.TestFailure(
-                    "AP in unexpected state: {}".format(ap_updates))
+                    "AP in unexpected state: {}".format(ap_updates)
+                )
 
     def test_interfaces_recreated_when_initially_enabled(self):
         """This test ensures that after a new regulatory region is applied
@@ -137,7 +140,8 @@
             # country code.
             fd.wlan_policy_controller.start_client_connections()
             fd.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-                test_ssid, test_security_type, "", "local_only", "any")
+                test_ssid, test_security_type, "", "local_only", "any"
+            )
 
             # Set the country code.
             self.set_country_code(fd)
@@ -151,24 +155,33 @@
             client_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
             if client_state["error"]:
                 raise signals.TestFailure(
-                    "error querying client state: {}".format(
-                        client_state["error"]))
+                    "error querying client state: {}".format(client_state["error"])
+                )
             elif client_state["result"]["state"] != "ConnectionsEnabled":
                 raise signals.TestFailure(
                     "client connections in unexpected state: {}".format(
-                        client_state["result"]["state"]))
+                        client_state["result"]["state"]
+                    )
+                )
 
             ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
             if ap_state["error"]:
-                raise signals.TestFailure("error querying AP state: {}".format(
-                    ap_state["error"]))
+                raise signals.TestFailure(
+                    "error querying AP state: {}".format(ap_state["error"])
+                )
 
             ap_updates = ap_state["result"]
             if len(ap_updates) != 1:
-                raise signals.TestFailure(
-                    "No APs are running: {}".format(ap_updates))
+                raise signals.TestFailure("No APs are running: {}".format(ap_updates))
             else:
-                if ap_updates[0]["id"]["ssid"] != test_ssid or ap_updates[0][
-                        "id"]["type_"].lower() != test_security_type:
+                if (
+                    ap_updates[0]["id"]["ssid"] != test_ssid
+                    or ap_updates[0]["id"]["type_"].lower() != test_security_type
+                ):
                     raise signals.TestFailure(
-                        "AP in unexpected state: {}".format(ap_updates[0]))
+                        "AP in unexpected state: {}".format(ap_updates[0])
+                    )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/SavedNetworksTest.py b/src/antlion/tests/wlan_policy/SavedNetworksTest.py
index ca9ade2..73165c7 100644
--- a/src/antlion/tests/wlan_policy/SavedNetworksTest.py
+++ b/src/antlion/tests/wlan_policy/SavedNetworksTest.py
@@ -18,13 +18,13 @@
 remove through the ClientController API of WLAN policy.
 """
 
-from antlion import signals
 from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
+from antlion.test_utils.wifi import base_test
 from antlion.utils import rand_ascii_str, rand_hex_str
 
+from mobly import signals, test_runner
+
 PSK_LEN = 64
 TIME_WAIT_FOR_DISCONNECT = 30
 TIME_WAIT_FOR_CONNECT = 30
@@ -44,7 +44,7 @@
 CREDENTIAL_VALUE_NONE = ""
 
 
-class SavedNetworksTest(WifiBaseTest):
+class SavedNetworksTest(base_test.WifiBaseTest):
     """WLAN policy commands test class.
 
     Test Bed Requirement:
@@ -58,14 +58,14 @@
         if len(self.fuchsia_devices) < 1:
             raise EnvironmentError("No Fuchsia devices found.")
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
 
     def setup_test(self):
         for fd in self.fuchsia_devices:
             if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
+                raise EnvironmentError("Failed to remove all networks in setup")
         self.access_points[0].stop_all_aps()
 
     def teardown_class(self):
@@ -74,7 +74,7 @@
         self.access_points[0].stop_all_aps()
 
     def save_bad_network(self, fd, ssid, security_type, password=""):
-        """ Saves a network as specified on the given device and verify that we
+        """Saves a network as specified on the given device and verify that we
         Args:
             fd: The Fuchsia device to save the network on
             ssid: The SSID or name of the network to save.
@@ -83,17 +83,18 @@
             password: The password to save for the network. Empty string represents
                     no password, and PSK should be provided as 64 character hex string.
         """
-        if fd.wlan_policy_controller.save_network(ssid,
-                                                  security_type,
-                                                  password=password):
+        if fd.wlan_policy_controller.save_network(
+            ssid, security_type, password=password
+        ):
             self.log.info(
-                "Attempting to save bad network config %s did not give an error"
-                % ssid)
+                "Attempting to save bad network config %s did not give an error" % ssid
+            )
             raise signals.TestFailure("Failed to get error saving bad network")
 
-    def check_get_saved_network(self, fd, ssid, security_type, credential_type,
-                                credential_value):
-        """ Verify that get saved networks sees the single specified network. Used
+    def check_get_saved_network(
+        self, fd, ssid, security_type, credential_type, credential_value
+    ):
+        """Verify that get saved networks sees the single specified network. Used
             for the tests that save and get a single network. Maps security types of
             expected and actual to be case insensitive.
         Args:
@@ -105,16 +106,18 @@
                             "none", "password", or "psk".
             credential_value: The actual credential, or "" if there is no credential.
         """
-        expected_networks = [{
-            "ssid": ssid,
-            "security_type": security_type,
-            "credential_type": credential_type,
-            "credential_value": credential_value
-        }]
+        expected_networks = [
+            {
+                "ssid": ssid,
+                "security_type": security_type,
+                "credential_type": credential_type,
+                "credential_value": credential_value,
+            }
+        ]
         self.check_saved_networks(fd, expected_networks)
 
     def check_saved_networks(self, fd, expected_networks):
-        """ Verify that the saved networks we get from the device match the provided
+        """Verify that the saved networks we get from the device match the provided
             list of networks.
         Args:
             fd: The Fuchsia device to run on.
@@ -125,24 +128,27 @@
                             no duplicates in expected networks.
         """
         actual_networks = list(
-            map(self.lower_case_network,
-                fd.wlan_policy_controller.get_saved_networks()))
+            map(self.lower_case_network, fd.wlan_policy_controller.get_saved_networks())
+        )
         expected_networks = list(
-            map(self.lower_case_network,
-                fd.wlan_policy_controller.get_saved_networks()))
+            map(self.lower_case_network, expected_networks)
+        )
 
         if len(actual_networks) != len(expected_networks):
             self.log.info(
                 "Number of expected saved networks does not match the actual number."
-                "Expected: %d, actual: %d" %
-                (len(actual_networks), len(expected_networks)))
+                "Expected: %d, actual: %d"
+                % (len(expected_networks), len(actual_networks))
+            )
             raise signals.TestFailure(
-                "Failed to get the expected number of saved networks")
+                "Failed to get the expected number of saved networks"
+            )
         for network in actual_networks:
             if network not in expected_networks:
                 self.log.info(
                     "Actual and expected networks do not match. Actual: %s,\n"
-                    "Expected: %s" % (actual_networks, expected_networks))
+                    "Expected: %s" % (actual_networks, expected_networks)
+                )
                 raise signals.TestFailure("Got an unexpected saved network")
 
     def lower_case_network(self, network):
@@ -155,7 +161,7 @@
         {"ssid": network["ssid"], "security_type": network["security_type"]}
 
     def save_and_check_network(self, ssid, security_type, password=""):
-        """ Perform a test for saving, getting, and removing a single network on each
+        """Perform a test for saving, getting, and removing a single network on each
             device.
         Args:
             ssid: The network name to use.
@@ -166,14 +172,15 @@
         """
         for fd in self.fuchsia_devices:
             if not fd.wlan_policy_controller.save_network(
-                    ssid, security_type, password=password):
+                ssid, security_type, password=password
+            ):
                 raise signals.TestFailure("Failed to save network")
-            self.check_get_saved_network(fd, ssid, security_type,
-                                         self.credentialType(password),
-                                         password)
+            self.check_get_saved_network(
+                fd, ssid, security_type, self.credentialType(password), password
+            )
 
     def start_ap(self, ssid, security_type, password=None, hidden=False):
-        """ Starts an access point.
+        """Starts an access point.
         Args:
             ssid: the SSID of the network to broadcast
             security_type: the security type of the network to be broadcasted. This can be
@@ -186,24 +193,26 @@
         if security_type == None or security_type.upper() == SECURITY_NONE:
             security = None
         else:
-            security = hostapd_security.Security(security_mode=security_type,
-                                                 password=password)
+            security = hostapd_security.Security(
+                security_mode=security_type, password=password
+            )
 
         if len(self.access_points) > 0:
             # Create an AP with default values other than the specified values.
-            setup_ap(self.access_points[0],
-                     'whirlwind',
-                     hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                     ssid,
-                     security=security)
+            setup_ap(
+                self.access_points[0],
+                "whirlwind",
+                hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+                ssid,
+                security=security,
+            )
 
         else:
-            self.log.error(
-                "No access point available for test, please check config")
+            self.log.error("No access point available for test, please check config")
             raise EnvironmentError("Failed to set up AP for test")
 
     def credentialType(self, credentialValue):
-        """ Returns the type of the credential to compare against values reported """
+        """Returns the type of the credential to compare against values reported"""
         if len(credentialValue) == PSK_LEN:
             return PSK
         elif len(credentialValue) == 0:
@@ -212,11 +221,12 @@
             return PASSWORD
 
     def same_network_identifier(self, net_id, ssid, security_type):
-        """ Returns true if the network id is made of the given ssid and security
-            type, and false otherwise. Security type check is case insensitive.
+        """Returns true if the network id is made of the given ssid and security
+        type, and false otherwise. Security type check is case insensitive.
         """
-        return net_id["ssid"] == ssid and net_id["type_"].upper(
-        ) == security_type.upper()
+        return (
+            net_id["ssid"] == ssid and net_id["type_"].upper() == security_type.upper()
+        )
 
     """Tests"""
 
@@ -224,8 +234,9 @@
         for fd in self.fuchsia_devices:
             # Save an open network with a password and verify that it fails to
             # save.
-            self.save_bad_network(fd, rand_ascii_str(10), SECURITY_NONE,
-                                  rand_ascii_str(8))
+            self.save_bad_network(
+                fd, rand_ascii_str(10), SECURITY_NONE, rand_ascii_str(8)
+            )
             self.check_saved_networks(fd, {})
 
     def test_open_network(self):
@@ -265,13 +276,13 @@
         password = rand_ascii_str(10)
         for fd in self.fuchsia_devices:
             if not fd.wlan_policy_controller.save_network(
-                    ssid, security, password=password):
+                ssid, security, password=password
+            ):
                 raise signals.TestFailure("Failed to save network")
             # Reboot the device. The network should be persistently saved
             # before the command is completed.
             fd.reboot()
-            self.check_get_saved_network(fd, ssid, security, PASSWORD,
-                                         password)
+            self.check_get_saved_network(fd, ssid, security, PASSWORD, password)
 
     def test_same_ssid_diff_security(self):
         for fd in self.fuchsia_devices:
@@ -279,22 +290,27 @@
             ssid = rand_ascii_str(19)
             password = rand_ascii_str(12)
             if not fd.wlan_policy_controller.save_network(
-                    ssid, WPA2, password=password):
+                ssid, WPA2, password=password
+            ):
                 raise signals.TestFailure("Failed to save network")
-            saved_networks.append({
-                "ssid": ssid,
-                "security_type": WPA2,
-                "credential_type": PASSWORD,
-                "credential_value": password
-            })
+            saved_networks.append(
+                {
+                    "ssid": ssid,
+                    "security_type": WPA2,
+                    "credential_type": PASSWORD,
+                    "credential_value": password,
+                }
+            )
             if not fd.wlan_policy_controller.save_network(ssid, SECURITY_NONE):
                 raise signals.TestFailure("Failed to save network")
-            saved_networks.append({
-                "ssid": ssid,
-                "security_type": SECURITY_NONE,
-                "credential_type": CREDENTIAL_TYPE_NONE,
-                "credential_value": CREDENTIAL_VALUE_NONE
-            })
+            saved_networks.append(
+                {
+                    "ssid": ssid,
+                    "security_type": SECURITY_NONE,
+                    "credential_type": CREDENTIAL_TYPE_NONE,
+                    "credential_value": CREDENTIAL_VALUE_NONE,
+                }
+            )
             actual_networks = fd.wlan_policy_controller.get_saved_networks()
             # Both should be saved and present in network store since they have
             # different security types and therefore different network identifiers.
@@ -314,13 +330,12 @@
             fd.wlan_policy_controller.wait_for_no_connections()
 
             if not fd.wlan_policy_controller.save_and_connect:
-                raise signals.TestFailure(
-                    "Failed to saved and connect to network")
+                raise signals.TestFailure("Failed to saved and connect to network")
 
-            if not fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
+            if (
+                not fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
             ):
-                raise signals.TestFailure(
-                    "Failed to disconnect from removed network")
+                raise signals.TestFailure("Failed to disconnect from removed network")
 
     def test_auto_connect_open(self):
         # Start up AP with an open network with a random SSID
@@ -333,10 +348,12 @@
             security = SECURITY_NONE
             password = CREDENTIAL_VALUE_NONE
             if not fd.wlan_policy_controller.save_network(
-                    ssid, security, password=password):
+                ssid, security, password=password
+            ):
                 raise signals.TestFailure("Failed to save network")
             if not fd.wlan_policy_controller.wait_for_connect(
-                    ssid, security, timeout=TIME_WAIT_FOR_CONNECT):
+                ssid, security, timeout=TIME_WAIT_FOR_CONNECT
+            ):
                 raise signals.TestFailure("Failed to connect to network")
 
     def test_auto_connect_wpa3(self):
@@ -350,8 +367,14 @@
 
             # Save the network and make sure that we see the device auto connect to it.
             if not fd.wlan_policy_controller.save_network(
-                    ssid, security, password=password):
+                ssid, security, password=password
+            ):
                 raise signals.TestFailure("Failed to save network")
             if not fd.wlan_policy_controller.wait_for_connect(
-                    ssid, security, timeout=TIME_WAIT_FOR_CONNECT):
+                ssid, security, timeout=TIME_WAIT_FOR_CONNECT
+            ):
                 raise signals.TestFailure("Failed to connect to network")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py b/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
index 01a3094..0b87348 100644
--- a/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
+++ b/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
@@ -16,13 +16,13 @@
 
 import time
 
-from antlion import signals
 from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
+from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
+from antlion.test_utils.wifi import base_test
 from antlion.utils import rand_ascii_str
 
+from mobly import signals, test_runner
+
 DISCONNECTED = "Disconnected"
 CONNECTION_STOPPED = "ConnectionStopped"
 CONNECTIONS_ENABLED = "ConnectionsEnabled"
@@ -31,8 +31,8 @@
 UPDATE_TIMEOUT_SEC = 5
 
 
-class StartStopClientConnectionsTest(WifiBaseTest):
-    """ Tests that we see the expected behavior with enabling and disabling
+class StartStopClientConnectionsTest(base_test.WifiBaseTest):
+    """Tests that we see the expected behavior with enabling and disabling
         client connections
 
     Test Bed Requirement:
@@ -45,56 +45,59 @@
         # Start an AP with a hidden network
         self.ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         self.access_point = self.access_points[0]
-        self.password = rand_ascii_str(
-            hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
+        self.password = rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
         self.security_type = WPA2
-        security = hostapd_security.Security(security_mode=self.security_type,
-                                             password=self.password)
+        security = hostapd_security.Security(
+            security_mode=self.security_type, password=self.password
+        )
 
         self.access_point.stop_all_aps()
         # TODO(63719) use varying values for AP that shouldn't affect the test.
-        setup_ap(self.access_point,
-                 'whirlwind',
-                 hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 self.ssid,
-                 security=security)
+        setup_ap(
+            self.access_point,
+            "whirlwind",
+            hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            self.ssid,
+            security=security,
+        )
 
         if len(self.fuchsia_devices) < 1:
             raise EnvironmentError("No Fuchsia devices found.")
         for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
 
     def setup_test(self):
         for fd in self.fuchsia_devices:
             if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
+                raise EnvironmentError("Failed to remove all networks in setup")
 
     def teardown_class(self):
         self.access_point.stop_all_aps()
 
     def connect_and_validate(self, fd, ssid, security_type, expected_response):
-        """ Sends a connect request to the device and verifies we get a response
-            without error. This does not validate that a connection will be
-            attempted. This will fail the test if there is an error sending the
-            connect request, or if we don't get the expected connect response."""
-        result_connect = fd.sl4f.wlan_policy_lib.wlanConnect(
-            ssid, security_type)
+        """Sends a connect request to the device and verifies we get a response
+        without error. This does not validate that a connection will be
+        attempted. This will fail the test if there is an error sending the
+        connect request, or if we don't get the expected connect response."""
+        result_connect = fd.sl4f.wlan_policy_lib.wlanConnect(ssid, security_type)
         if result_connect.get("error") != None:
-            self.log.error("Error occurred requesting a connection: %s" %
-                           result_connect.get("error"))
+            self.log.error(
+                "Error occurred requesting a connection: %s"
+                % result_connect.get("error")
+            )
             raise EnvironmentError("Failed to send connect request")
         response = result_connect.get("result")
         if response != expected_response:
             self.log.error(
-                "Incorrect connect request response. Expected: \"%s\", Actual: %s"
-                % (expected_response, response))
-            raise signals.TestFailure(
-                "Failed to get expected connect response")
+                'Incorrect connect request response. Expected: "%s", Actual: %s'
+                % (expected_response, response)
+            )
+            raise signals.TestFailure("Failed to get expected connect response")
 
     def await_state_update(self, fd, desired_state, timeout):
-        """ This function polls the policy client state until it converges to
+        """This function polls the policy client state until it converges to
             the caller's desired state.
 
         Args:
@@ -113,20 +116,21 @@
             fd.sl4f.wlan_policy_lib.wlanSetNewListener()
             curr_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
             if curr_state.get("error"):
-                self.log.error("Error occurred getting status update: %s" %
-                               curr_state.get("error"))
+                self.log.error(
+                    "Error occurred getting status update: %s" % curr_state.get("error")
+                )
                 raise EnvironmentError("Failed to get update")
 
-            if curr_state.get("result") and curr_state.get(
-                    "result") == desired_state:
+            if curr_state.get("result") and curr_state.get("result") == desired_state:
                 return
 
             time.sleep(1)
 
         self.log.error(
             "Client state did not converge to the expected state in %s "
-            "seconds. Expected update: %s Actual update: %s" %
-            (timeout, desired_state, curr_state))
+            "seconds. Expected update: %s Actual update: %s"
+            % (timeout, desired_state, curr_state)
+        )
         raise signals.TestFailure("Client policy layer is in unexpected state")
 
     def test_stop_client_connections_update(self):
@@ -159,11 +163,13 @@
             # Save the network, otherwise connecting may fail because the
             # network is not saved instead of client connections being off
             if not fd.wlan_policy_controller.save_network(
-                    self.ssid, self.security_type, password=self.password):
+                self.ssid, self.security_type, password=self.password
+            ):
                 raise EnvironmentError("Failed to save network")
             expected_response = "RejectedIncompatibleMode"
-            self.connect_and_validate(fd, self.ssid, self.security_type,
-                                      expected_response)
+            self.connect_and_validate(
+                fd, self.ssid, self.security_type, expected_response
+            )
 
     def test_start_stop_client_connections(self):
         # Test that if we turn client connections on the device will connect,
@@ -171,25 +177,34 @@
         for fd in self.fuchsia_devices:
             # Start client connections and check that we can
             if not fd.wlan_policy_controller.save_network(
-                    self.ssid, self.security_type, password=self.password):
+                self.ssid, self.security_type, password=self.password
+            ):
                 raise EnvironmentError("Failed to save network")
             if not fd.wlan_policy_controller.start_client_connections():
                 raise EnvironmentError("Failed to start client connections")
 
             expected_response = "Acknowledged"
-            self.connect_and_validate(fd, self.ssid, self.security_type,
-                                      expected_response)
+            self.connect_and_validate(
+                fd, self.ssid, self.security_type, expected_response
+            )
 
             if not fd.wlan_policy_controller.wait_for_connect(
-                    self.ssid, self.security_type):
+                self.ssid, self.security_type
+            ):
                 raise signals.TestFailure(
-                    "Failed to connect after starting client connections")
+                    "Failed to connect after starting client connections"
+                )
 
             # Stop client connections again and check that we disconnect
             if not fd.wlan_policy_controller.stop_client_connections():
                 raise EnvironmentError("Failed to stop client connecions")
             if not fd.wlan_policy_controller.wait_for_disconnect(
-                    self.ssid, self.security_type, DISCONNECTED,
-                    CONNECTION_STOPPED):
+                self.ssid, self.security_type, DISCONNECTED, CONNECTION_STOPPED
+            ):
                 raise signals.TestFailure(
-                    "Failed to disconnect after client connections stopped")
+                    "Failed to disconnect after client connections stopped"
+                )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tracelogger.py b/src/antlion/tracelogger.py
index 1d2650e..1157379 100644
--- a/src/antlion/tracelogger.py
+++ b/src/antlion/tracelogger.py
@@ -26,21 +26,24 @@
     def _get_trace_info(level=1, offset=2):
         # We want the stack frame above this and above the error/warning/info
         inspect_stack = inspect.stack()
-        trace_info = ''
+        trace_info = ""
         for i in range(level):
             try:
                 stack_frames = inspect_stack[offset + i]
                 info = inspect.getframeinfo(stack_frames[0])
-                trace_info = '%s[%s:%s:%s]' % (trace_info,
-                                               os.path.basename(info.filename),
-                                               info.function, info.lineno)
+                trace_info = "%s[%s:%s:%s]" % (
+                    trace_info,
+                    os.path.basename(info.filename),
+                    info.function,
+                    info.lineno,
+                )
             except IndexError:
                 break
         return trace_info
 
     def _log_with(self, logging_lambda, trace_level, msg, *args, **kwargs):
         trace_info = TraceLogger._get_trace_info(level=trace_level, offset=3)
-        logging_lambda('%s %s' % (msg, trace_info), *args, **kwargs)
+        logging_lambda("%s %s" % (msg, trace_info), *args, **kwargs)
 
     def exception(self, msg, *args, **kwargs):
         self._log_with(self._logger.exception, 5, msg, *args, **kwargs)
diff --git a/src/antlion/unit_tests/AttenuatorSanityTest.py b/src/antlion/unit_tests/AttenuatorSanityTest.py
index 0864cbb..94a15f3 100644
--- a/src/antlion/unit_tests/AttenuatorSanityTest.py
+++ b/src/antlion/unit_tests/AttenuatorSanityTest.py
@@ -24,13 +24,14 @@
 class AttenuatorSanityTest(BaseTestClass):
     def __init__(self, controllers):
         BaseTestClass.__init__(self, controllers)
-        self.tests = ("test_attenuator_validation",
-                      "test_attenuator_get_max_value", )
+        self.tests = (
+            "test_attenuator_validation",
+            "test_attenuator_get_max_value",
+        )
         self.number_of_iteration = 2
 
     def test_attenuator_validation(self):
-        """Validate attenuator set and get APIs works fine.
-        """
+        """Validate attenuator set and get APIs works fine."""
         for atten in self.attenuators:
             self.log.info("Attenuator: {}".format(atten))
             try:
@@ -42,8 +43,7 @@
 
             atten_value_list = [MIN_ATTEN_VALUE, atten_max_value]
             for i in range(0, self.number_of_iteration):
-                atten_value_list.append(
-                    int(random.uniform(0, atten_max_value)))
+                atten_value_list.append(int(random.uniform(0, atten_max_value)))
 
             for atten_val in atten_value_list:
                 self.log.info("Set atten to {}".format(atten_val))
@@ -55,8 +55,7 @@
         return True
 
     def test_attenuator_get_max_value(self):
-        """Validate attenuator get_max_atten APIs works fine.
-        """
+        """Validate attenuator get_max_atten APIs works fine."""
         for atten in self.attenuators:
             try:
                 atten_max_value = atten.get_max_atten()
diff --git a/src/antlion/unit_tests/IntegrationTest.py b/src/antlion/unit_tests/IntegrationTest.py
index 0dff5f2..8a55e80 100755
--- a/src/antlion/unit_tests/IntegrationTest.py
+++ b/src/antlion/unit_tests/IntegrationTest.py
@@ -14,10 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import asserts
 from antlion import base_test
 from antlion import test_runner
 
+from mobly import asserts
+
 import mock_controller
 
 
@@ -28,6 +29,7 @@
     def test_hello_world(self):
         asserts.assert_equal(self.user_params["icecream"], 42)
         asserts.assert_equal(self.user_params["extra_param"], "haha")
-        self.log.info("This is a bare minimal test to make sure the basic ACTS"
-                      "test flow works.")
+        self.log.info(
+            "This is a bare minimal test to make sure the basic ACTS" "test flow works."
+        )
         asserts.explicit_pass("Hello World")
diff --git a/src/antlion/unit_tests/Sl4aSanityTest.py b/src/antlion/unit_tests/Sl4aSanityTest.py
index a651b78..f505044 100644
--- a/src/antlion/unit_tests/Sl4aSanityTest.py
+++ b/src/antlion/unit_tests/Sl4aSanityTest.py
@@ -25,8 +25,7 @@
 
     def __init__(self, controllers):
         BaseTestClass.__init__(self, controllers)
-        self.tests = ("test_bring_up_and_shutdown",
-                      "test_message_then_shutdown_stress")
+        self.tests = ("test_bring_up_and_shutdown", "test_message_then_shutdown_stress")
 
     def test_bring_up_and_shutdown(self):
         """Constantly start and terminate sl4a sessions.
diff --git a/src/antlion/unit_tests/SnifferSanityTest.py b/src/antlion/unit_tests/SnifferSanityTest.py
index 56ae89b..6332143 100644
--- a/src/antlion/unit_tests/SnifferSanityTest.py
+++ b/src/antlion/unit_tests/SnifferSanityTest.py
@@ -26,8 +26,10 @@
         self._capture_sec = 30
         self._packet_count = 10
 
-        self._filter = {"tcpdump": "type mgt subtype beacon",
-                        "tshark": "type mgt subtype beacon"}
+        self._filter = {
+            "tcpdump": "type mgt subtype beacon",
+            "tshark": "type mgt subtype beacon",
+        }
 
     def test_sniffer_validation_using_with(self):
         """Validate sniffer configuration & capture API using the 'with' clause.
@@ -38,9 +40,10 @@
         for sniffer in self.sniffers:
             for channel in self._channels:
                 with sniffer.start_capture(
-                        override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                        duration=self._capture_sec,
-                        packet_count=self._packet_count):
+                    override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
+                    duration=self._capture_sec,
+                    packet_count=self._packet_count,
+                ):
                     self.log.info("Capture: %s", sniffer.get_capture_file())
 
     def test_sniffer_validation_manual(self):
@@ -57,19 +60,20 @@
             for channel in self._channels:
                 sniffer.start_capture(
                     override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                    packet_count=self._packet_count)
+                    packet_count=self._packet_count,
+                )
                 self.log.info("Capture: %s", sniffer.get_capture_file())
                 sniffer.wait_for_capture(timeout=self._capture_sec)
 
     def test_sniffer_validation_capture_3_beacons(self):
-        """Demonstrate the use of additional configuration.
-        """
+        """Demonstrate the use of additional configuration."""
         index = 0
         for sniffer in self.sniffers:
             for channel in self._channels:
                 with sniffer.start_capture(
-                        override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                        duration=self._capture_sec,
-                        packet_count=3,
-                        additional_args=self._filter[sniffer.get_subtype()]):
+                    override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
+                    duration=self._capture_sec,
+                    packet_count=3,
+                    additional_args=self._filter[sniffer.get_subtype()],
+                ):
                     self.log.info("Capture: %s", sniffer.get_capture_file())
diff --git a/src/antlion/unit_tests/acts_adb_test.py b/src/antlion/unit_tests/acts_adb_test.py
index dcaa91b..05a51bc 100755
--- a/src/antlion/unit_tests/acts_adb_test.py
+++ b/src/antlion/unit_tests/acts_adb_test.py
@@ -22,7 +22,7 @@
 
 
 class MockJob(object):
-    def __init__(self, exit_status=0, stderr='', stdout=''):
+    def __init__(self, exit_status=0, stderr="", stdout=""):
         self.exit_status = exit_status
         self.stderr = stderr
         self.stdout = stdout
@@ -37,56 +37,56 @@
     """A class for testing antlion/controllers/adb.py"""
 
     def test__exec_cmd_failure_old_adb(self):
-        mock_job = MockJob(exit_status=1, stderr='error: device not found')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(exit_status=1, stderr="error: device not found")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             with self.assertRaises(AdbError):
                 MockAdbProxy()._exec_cmd(cmd)
 
     def test__exec_cmd_failure_new_adb(self):
-        mock_job = MockJob(
-            exit_status=1, stderr='error: device \'DEADBEEF\' not found')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(exit_status=1, stderr="error: device 'DEADBEEF' not found")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             with self.assertRaises(AdbError):
                 MockAdbProxy()._exec_cmd(cmd)
 
     def test__exec_cmd_pass_basic(self):
-        mock_job = MockJob(exit_status=0, stderr='DEADBEEF', stdout='FEEDACAB')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(exit_status=0, stderr="DEADBEEF", stdout="FEEDACAB")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             result = MockAdbProxy()._exec_cmd(cmd)
-        self.assertEqual(result, 'FEEDACAB')
+        self.assertEqual(result, "FEEDACAB")
 
     def test__exec_cmd_ignore_status(self):
-        mock_job = MockJob(exit_status=0, stderr='DEADBEEF', stdout='')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(exit_status=0, stderr="DEADBEEF", stdout="")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             result = MockAdbProxy()._exec_cmd(cmd, ignore_status=True)
-        self.assertEqual(result, 'DEADBEEF')
+        self.assertEqual(result, "DEADBEEF")
 
     def test__exec_cmd_pass_grep(self):
-        mock_job = MockJob(exit_status=1, stderr='', stdout='foo')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"grep foo"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(exit_status=1, stderr="", stdout="foo")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"grep foo"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             result = MockAdbProxy()._exec_cmd(cmd)
-        self.assertEqual(result, 'foo')
+        self.assertEqual(result, "foo")
 
     def test__exec_cmd_failure_ret_nonzero(self):
-        mock_job = MockJob(exit_status=1, stderr='error not related to adb')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(exit_status=1, stderr="error not related to adb")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             with self.assertRaises(AdbCommandError):
                 MockAdbProxy()._exec_cmd(cmd)
 
     def test__exec_cmd_raises_on_bind_error(self):
         """Tests _exec_cmd raises an AdbError on port forwarding failure."""
-        mock_job = MockJob(exit_status=1,
-                           stderr='error: cannot bind listener: '
-                                  'Address already in use',
-                           stdout='')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
+        mock_job = MockJob(
+            exit_status=1,
+            stderr="error: cannot bind listener: " "Address already in use",
+            stdout="",
+        )
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
             with self.assertRaises(AdbError):
                 MockAdbProxy()._exec_cmd(cmd)
 
@@ -94,14 +94,15 @@
         """Tests the positive case for AdbProxy.get_version_number()."""
         proxy = MockAdbProxy()
         expected_version_number = 39
-        proxy.version = lambda: ('Android Debug Bridge version 1.0.%s\nblah' %
-                                 expected_version_number)
+        proxy.version = lambda: (
+            "Android Debug Bridge version 1.0.%s\nblah" % expected_version_number
+        )
         self.assertEqual(expected_version_number, proxy.get_version_number())
 
     def test__get_version_number_raises_upon_parse_failure(self):
         """Tests the failure case for AdbProxy.get_version_number()."""
         proxy = MockAdbProxy()
-        proxy.version = lambda: 'Bad format'
+        proxy.version = lambda: "Bad format"
         with self.assertRaises(AdbError):
             proxy.get_version_number()
 
diff --git a/src/antlion/unit_tests/acts_android_device_test.py b/src/antlion/unit_tests/acts_android_device_test.py
index b8591fd..dd2ecd3 100755
--- a/src/antlion/unit_tests/acts_android_device_test.py
+++ b/src/antlion/unit_tests/acts_android_device_test.py
@@ -31,7 +31,8 @@
 # Mock start and end time of the adb cat.
 MOCK_ADB_EPOCH_BEGIN_TIME = 191000123
 MOCK_ADB_LOGCAT_BEGIN_TIME = logger.normalize_log_line_timestamp(
-    logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME))
+    logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME)
+)
 MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
 
 MOCK_SERIAL = 1
@@ -68,12 +69,14 @@
 class MockAdbProxy(object):
     """Mock class that swaps out calls to adb with mock calls."""
 
-    def __init__(self,
-                 serial,
-                 fail_br=False,
-                 fail_br_before_N=False,
-                 build_id=MOCK_RELEASE_BUILD_ID,
-                 return_value=None):
+    def __init__(
+        self,
+        serial,
+        fail_br=False,
+        fail_br_before_N=False,
+        build_id=MOCK_RELEASE_BUILD_ID,
+        return_value=None,
+    ):
         self.serial = serial
         self.fail_br = fail_br
         self.fail_br_before_N = fail_br_before_N
@@ -115,12 +118,15 @@
 
     def bugreport(self, params, timeout=android_device.BUG_REPORT_TIMEOUT):
         expected = os.path.join(
-            logging.log_path, "AndroidDevice%s" % self.serial,
-            "AndroidDevice%s_%s.txt" %
-            (self.serial,
-             logger.normalize_log_line_timestamp(MOCK_ADB_LOGCAT_BEGIN_TIME)))
-        assert expected in params, "Expected '%s', got '%s'." % (expected,
-                                                                 params)
+            logging.log_path,
+            "AndroidDevice%s" % self.serial,
+            "AndroidDevice%s_%s.txt"
+            % (
+                self.serial,
+                logger.normalize_log_line_timestamp(MOCK_ADB_LOGCAT_BEGIN_TIME),
+            ),
+        )
+        assert expected in params, "Expected '%s', got '%s'." % (expected, params)
 
     def __getattr__(self, name):
         """All calls to the none-existent functions in adb proxy would
@@ -128,13 +134,13 @@
         """
 
         def adb_call(*args, **kwargs):
-            arg_str = ' '.join(str(elem) for elem in args)
+            arg_str = " ".join(str(elem) for elem in args)
             return arg_str
 
         return adb_call
 
 
-class MockFastbootProxy():
+class MockFastbootProxy:
     """Mock class that swaps out calls to adb with mock calls."""
 
     def __init__(self, serial):
@@ -145,7 +151,7 @@
 
     def __getattr__(self, name):
         def fastboot_call(*args):
-            arg_str = ' '.join(str(elem) for elem in args)
+            arg_str = " ".join(str(elem) for elem in args)
             return arg_str
 
         return fastboot_call
@@ -164,17 +170,14 @@
         self.tmp_dir = tempfile.mkdtemp()
 
     def tearDown(self):
-        """Removes the temp dir.
-        """
+        """Removes the temp dir."""
         shutil.rmtree(self.tmp_dir)
 
     # Tests for android_device module functions.
     # These tests use mock AndroidDevice instances.
 
-    @mock.patch.object(
-        android_device, "get_all_instances", new=mock_get_all_instances)
-    @mock.patch.object(
-        android_device, "list_adb_devices", new=mock_list_adb_devices)
+    @mock.patch.object(android_device, "get_all_instances", new=mock_get_all_instances)
+    @mock.patch.object(android_device, "list_adb_devices", new=mock_list_adb_devices)
     def test_create_with_pickup_all(self):
         pick_all_token = android_device.ANDROID_DEVICE_PICK_ALL_TOKEN
         actual_ads = android_device.create(pick_all_token)
@@ -183,14 +186,12 @@
 
     def test_create_with_empty_config(self):
         expected_msg = android_device.ANDROID_DEVICE_EMPTY_CONFIG_MSG
-        with self.assertRaisesRegex(errors.AndroidDeviceConfigError,
-                                    expected_msg):
+        with self.assertRaisesRegex(errors.AndroidDeviceConfigError, expected_msg):
             android_device.create([])
 
     def test_create_with_not_list_config(self):
         expected_msg = android_device.ANDROID_DEVICE_NOT_LIST_CONFIG_MSG
-        with self.assertRaisesRegex(errors.AndroidDeviceConfigError,
-                                    expected_msg):
+        with self.assertRaisesRegex(errors.AndroidDeviceConfigError, expected_msg):
             android_device.create("HAHA")
 
     def test_get_device_success_with_serial(self):
@@ -205,14 +206,16 @@
         expected_h_port = 5555
         ads[1].h_port = expected_h_port
         ad = android_device.get_device(
-            ads, serial=expected_serial, h_port=expected_h_port)
+            ads, serial=expected_serial, h_port=expected_h_port
+        )
         self.assertEqual(ad.serial, expected_serial)
         self.assertEqual(ad.h_port, expected_h_port)
 
     def test_get_device_no_match(self):
         ads = get_mock_ads(5)
-        expected_msg = ("Could not find a target device that matches condition"
-                        ": {'serial': 5}.")
+        expected_msg = (
+            "Could not find a target device that matches condition" ": {'serial': 5}."
+        )
         with self.assertRaisesRegex(ValueError, expected_msg):
             ad = android_device.get_device(ads, serial=len(ads))
 
@@ -234,7 +237,8 @@
         ads[1].start_services = mock.MagicMock()
         ads[1].clean_up = mock.MagicMock()
         ads[2].start_services = mock.MagicMock(
-            side_effect=errors.AndroidDeviceError(msg))
+            side_effect=errors.AndroidDeviceError(msg)
+        )
         ads[2].clean_up = mock.MagicMock()
         with self.assertRaisesRegex(errors.AndroidDeviceError, msg):
             android_device._start_services_on_ads(ads)
@@ -247,11 +251,12 @@
     # in AndroidDevice.
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_AndroidDevice_instantiation(self, MockFastboot, MockAdbProxy):
         """Verifies the AndroidDevice object's basic attributes are correctly
         set after instantiation.
@@ -260,18 +265,17 @@
         self.assertEqual(ad.serial, 1)
         self.assertEqual(ad.model, "fakemodel")
         self.assertIsNone(ad.adb_logcat_process)
-        expected_lp = os.path.join(logging.log_path,
-                                   "AndroidDevice%s" % MOCK_SERIAL)
+        expected_lp = os.path.join(logging.log_path, "AndroidDevice%s" % MOCK_SERIAL)
         self.assertEqual(ad.log_path, expected_lp)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_AndroidDevice_build_info_release(self, MockFastboot,
-                                              MockAdbProxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_AndroidDevice_build_info_release(self, MockFastboot, MockAdbProxy):
         """Verifies the AndroidDevice object's basic attributes are correctly
         set after instantiation.
         """
@@ -281,11 +285,13 @@
         self.assertEqual(build_info["build_type"], "userdebug")
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_DEV_BUILD_ID))
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_DEV_BUILD_ID),
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_AndroidDevice_build_info_dev(self, MockFastboot, MockAdbProxy):
         """Verifies the AndroidDevice object's basic attributes are correctly
         set after instantiation.
@@ -296,11 +302,13 @@
         self.assertEqual(build_info["build_type"], "userdebug")
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_NYC_BUILD_ID))
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_NYC_BUILD_ID),
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_AndroidDevice_build_info_nyc(self, MockFastboot, MockAdbProxy):
         """Verifies the AndroidDevice object's build id is set correctly for
         NYC releases.
@@ -310,113 +318,133 @@
         self.assertEqual(build_info["build_id"], MOCK_NYC_BUILD_ID)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('os.makedirs')
-    @mock.patch('antlion.utils.exe_cmd')
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("os.makedirs")
+    @mock.patch("antlion.utils.exe_cmd")
     @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.device_log_path',
-        new_callable=mock.PropertyMock)
-    def test_AndroidDevice_take_bug_report(self, mock_log_path, exe_mock,
-                                           mock_makedirs, FastbootProxy,
-                                           MockAdbProxy):
+        "antlion.controllers.android_device.AndroidDevice.device_log_path",
+        new_callable=mock.PropertyMock,
+    )
+    def test_AndroidDevice_take_bug_report(
+        self, mock_log_path, exe_mock, mock_makedirs, FastbootProxy, MockAdbProxy
+    ):
         """Verifies AndroidDevice.take_bug_report calls the correct adb command
         and writes the bugreport file to the correct path.
         """
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial)
+            logging.log_path, "AndroidDevice%s" % ad.serial
+        )
         ad.take_bug_report("test_something", 234325.32)
         mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, fail_br=True))
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, fail_br=True),
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('os.makedirs')
-    @mock.patch('antlion.utils.exe_cmd')
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("os.makedirs")
+    @mock.patch("antlion.utils.exe_cmd")
     @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.device_log_path',
-        new_callable=mock.PropertyMock)
+        "antlion.controllers.android_device.AndroidDevice.device_log_path",
+        new_callable=mock.PropertyMock,
+    )
     def test_AndroidDevice_take_bug_report_fail(self, mock_log_path, *_):
         """Verifies AndroidDevice.take_bug_report writes out the correct message
         when taking bugreport fails.
         """
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial)
+            logging.log_path, "AndroidDevice%s" % ad.serial
+        )
         expected_msg = "Failed to take bugreport on 1: OMG I died!"
         with self.assertRaisesRegex(errors.AndroidDeviceError, expected_msg):
             ad.take_bug_report("test_something", 4346343.23)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, fail_br_before_N=True))
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, fail_br_before_N=True),
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('os.makedirs')
-    @mock.patch('antlion.utils.exe_cmd')
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("os.makedirs")
+    @mock.patch("antlion.utils.exe_cmd")
     @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.device_log_path',
-        new_callable=mock.PropertyMock)
+        "antlion.controllers.android_device.AndroidDevice.device_log_path",
+        new_callable=mock.PropertyMock,
+    )
     def test_AndroidDevice_take_bug_report_fallback(
-            self, mock_log_path, exe_mock, mock_makedirs, FastbootProxy,
-            MockAdbProxy):
+        self, mock_log_path, exe_mock, mock_makedirs, FastbootProxy, MockAdbProxy
+    ):
         """Verifies AndroidDevice.take_bug_report falls back to traditional
         bugreport on builds that do not have bugreportz.
         """
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial)
+            logging.log_path, "AndroidDevice%s" % ad.serial
+        )
         ad.take_bug_report("test_something", MOCK_ADB_EPOCH_BEGIN_TIME)
         mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_AndroidDevice_start_adb_logcat(self, proc_mock, FastbootProxy,
-                                            MockAdbProxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("antlion.libs.proc.process.Process")
+    def test_AndroidDevice_start_adb_logcat(
+        self, proc_mock, FastbootProxy, MockAdbProxy
+    ):
         """Verifies the AndroidDevice method start_adb_logcat. Checks that the
         underlying logcat process is started properly and correct warning msgs
         are generated.
         """
-        with mock.patch(('antlion.controllers.android_lib.logcat.'
-                         'create_logcat_keepalive_process'),
-                        return_value=proc_mock) as create_proc_mock:
+        with mock.patch(
+            (
+                "antlion.controllers.android_lib.logcat."
+                "create_logcat_keepalive_process"
+            ),
+            return_value=proc_mock,
+        ) as create_proc_mock:
             ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
             ad.start_adb_logcat()
             # Verify start did the correct operations.
             self.assertTrue(ad.adb_logcat_process)
             log_dir = "AndroidDevice%s" % ad.serial
-            create_proc_mock.assert_called_with(ad.serial, log_dir, '-b all')
+            create_proc_mock.assert_called_with(ad.serial, log_dir, "-b all")
             proc_mock.start.assert_called_with()
             # Expect warning msg if start is called back to back.
             expected_msg = "Android device .* already has a running adb logcat"
             proc_mock.is_running.return_value = True
-            with self.assertLogs(level='WARNING') as log:
+            with self.assertLogs(level="WARNING") as log:
                 ad.start_adb_logcat()
                 self.assertRegex(log.output[0], expected_msg)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('antlion.controllers.android_lib.logcat.'
-                'create_logcat_keepalive_process')
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch(
+        "antlion.controllers.android_lib.logcat." "create_logcat_keepalive_process"
+    )
     def test_AndroidDevice_start_adb_logcat_with_user_param(
-            self, create_proc_mock, FastbootProxy, MockAdbProxy):
+        self, create_proc_mock, FastbootProxy, MockAdbProxy
+    ):
         """Verifies that start_adb_logcat generates the correct adb logcat
         command if adb_logcat_param is specified.
         """
@@ -426,17 +454,19 @@
         # Verify that create_logcat_keepalive_process is called with the
         # correct command.
         log_dir = "AndroidDevice%s" % ad.serial
-        create_proc_mock.assert_called_with(ad.serial, log_dir, '-b radio')
+        create_proc_mock.assert_called_with(ad.serial, log_dir, "-b radio")
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_AndroidDevice_stop_adb_logcat(self, proc_mock, FastbootProxy,
-                                           MockAdbProxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("antlion.libs.proc.process.Process")
+    def test_AndroidDevice_stop_adb_logcat(
+        self, proc_mock, FastbootProxy, MockAdbProxy
+    ):
         """Verifies the AndroidDevice method stop_adb_logcat. Checks that the
         underlying logcat process is stopped properly and correct warning msgs
         are generated.
@@ -444,10 +474,9 @@
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.adb_logcat_process = proc_mock
         # Expect warning msg if stop is called before start.
-        expected_msg = (
-            "Android device .* does not have an ongoing adb logcat")
+        expected_msg = "Android device .* does not have an ongoing adb logcat"
         proc_mock.is_running.return_value = False
-        with self.assertLogs(level='WARNING') as log:
+        with self.assertLogs(level="WARNING") as log:
             ad.stop_adb_logcat()
             self.assertRegex(log.output[0], expected_msg)
 
@@ -457,70 +486,73 @@
         proc_mock.stop.assert_called_with()
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_apk_process_id_process_cannot_find(self, fastboot_proxy,
-                                                    adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_apk_process_id_process_cannot_find(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.adb.return_value = "does_not_contain_value"
         self.assertEqual(None, ad.get_package_pid("some_package"))
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_apk_process_id_process_exists_second_try(self, fastboot_proxy,
-                                                          adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_apk_process_id_process_exists_second_try(
+        self, fastboot_proxy, adb_proxy
+    ):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.adb.return_multiple = True
         ad.adb.return_value = ["", "system 1 2 3 4  S com.some_package"]
         self.assertEqual(1, ad.get_package_pid("some_package"))
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_get_apk_process_id_bad_return(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.adb.return_value = "bad_return_index_error"
         self.assertEqual(None, ad.get_package_pid("some_package"))
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_get_apk_process_id_bad_return(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.adb.return_value = "bad return value error"
         self.assertEqual(None, ad.get_package_pid("some_package"))
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_enabled_only_system_enabled(self, fastboot_proxy,
-                                                       adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_enabled_only_system_enabled(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
+        root_user_id = "0"
 
         ad.adb.get_user_id = mock.MagicMock()
         ad.adb.get_user_id.return_value = root_user_id
 
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '',  # system.verified
-            '2'
-        ])  # vendor.verified
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["", "2"]  # [system.verified, vendor.verified]
+        )
         ad.adb.ensure_user = mock.MagicMock()
         ad.reboot = mock.MagicMock()
         ad.ensure_verity_enabled()
@@ -529,23 +561,22 @@
         ad.adb.ensure_user.assert_called_with(root_user_id)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_enabled_only_vendor_enabled(self, fastboot_proxy,
-                                                       adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_enabled_only_vendor_enabled(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
+        root_user_id = "0"
 
         ad.adb.get_user_id = mock.MagicMock()
         ad.adb.get_user_id.return_value = root_user_id
 
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '2',  # system.verified
-            ''
-        ])  # vendor.verified
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["2", ""]  # [system.verified, vendor.verified]
+        )
         ad.adb.ensure_user = mock.MagicMock()
         ad.reboot = mock.MagicMock()
 
@@ -555,23 +586,24 @@
         ad.adb.ensure_user.assert_called_with(root_user_id)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_enabled_both_enabled_at_start(self, fastboot_proxy,
-                                                         adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_enabled_both_enabled_at_start(
+        self, fastboot_proxy, adb_proxy
+    ):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
+        root_user_id = "0"
 
         ad.adb.get_user_id = mock.MagicMock()
         ad.adb.get_user_id.return_value = root_user_id
 
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '2',  # system.verified
-            '2'
-        ])  # vendor.verified
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["2", "2"]  # [system.verified, vendor.verified]
+        )
         ad.adb.ensure_user = mock.MagicMock()
         ad.reboot = mock.MagicMock()
         ad.ensure_verity_enabled()
@@ -579,23 +611,24 @@
         assert not ad.reboot.called
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_ensure_verity_disabled_system_already_disabled(
-            self, fastboot_proxy, adb_proxy):
+        self, fastboot_proxy, adb_proxy
+    ):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
+        root_user_id = "0"
 
         ad.adb.get_user_id = mock.MagicMock()
         ad.adb.get_user_id.return_value = root_user_id
 
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '2',  # system.verified
-            ''
-        ])  # vendor.verified
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["2", ""]  # [system.verified, vendor.verified]
+        )
         ad.adb.ensure_user = mock.MagicMock()
         ad.reboot = mock.MagicMock()
         ad.ensure_verity_disabled()
@@ -603,23 +636,24 @@
         ad.reboot.assert_called_once()
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_ensure_verity_disabled_vendor_already_disabled(
-            self, fastboot_proxy, adb_proxy):
+        self, fastboot_proxy, adb_proxy
+    ):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
+        root_user_id = "0"
 
         ad.adb.get_user_id = mock.MagicMock()
         ad.adb.get_user_id.return_value = root_user_id
 
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '',  # system.verified
-            '2'
-        ])  # vendor.verified
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["", "2"]  # [system.verified, vendor.verified]
+        )
         ad.adb.ensure_user = mock.MagicMock()
         ad.reboot = mock.MagicMock()
 
@@ -629,23 +663,22 @@
         ad.adb.ensure_user.assert_called_with(root_user_id)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_disabled_disabled_at_start(
-            self, fastboot_proxy, adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_disabled_disabled_at_start(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
+        root_user_id = "0"
 
         ad.adb.get_user_id = mock.MagicMock()
         ad.adb.get_user_id.return_value = root_user_id
 
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '',  # system.verified
-            ''
-        ])  # vendor.verified
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["", ""]  # [system.verified, vendor.verified]
+        )
         ad.adb.ensure_user = mock.MagicMock()
         ad.reboot = mock.MagicMock()
 
@@ -654,63 +687,66 @@
         assert not ad.reboot.called
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_push_system_file(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.ensure_verity_disabled = mock.MagicMock()
         ad.adb.remount = mock.MagicMock()
         ad.adb.push = mock.MagicMock()
 
-        ret = ad.push_system_file('asdf', 'jkl')
+        ret = ad.push_system_file("asdf", "jkl")
         self.assertTrue(ret)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_push_system_file_returns_false_on_error(self, fastboot_proxy,
-                                                     adb_proxy):
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_push_system_file_returns_false_on_error(self, fastboot_proxy, adb_proxy):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         ad.ensure_verity_disabled = mock.MagicMock()
         ad.adb.remount = mock.MagicMock()
-        ad.adb.push = mock.MagicMock(return_value='error')
+        ad.adb.push = mock.MagicMock(return_value="error")
 
-        ret = ad.push_system_file('asdf', 'jkl')
+        ret = ad.push_system_file("asdf", "jkl")
         self.assertFalse(ret)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_get_my_current_focus_window_return_empty_string(self, *_):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = ''
+        ad.adb.return_value = ""
 
         ret = ad.get_my_current_focus_window()
 
-        self.assertEqual('', ret)
+        self.assertEqual("", ret)
 
     @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
     @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
     def test_get_my_current_focus_window_return_current_window(self, *_):
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = 'mCurrentFocus=Window{a247ded u0 NotificationShade}'
+        ad.adb.return_value = "mCurrentFocus=Window{a247ded u0 NotificationShade}"
 
         ret = ad.get_my_current_focus_window()
 
-        self.assertEqual('NotificationShade', ret)
+        self.assertEqual("NotificationShade", ret)
 
 
 if __name__ == "__main__":
diff --git a/src/antlion/unit_tests/acts_asserts_test.py b/src/antlion/unit_tests/acts_asserts_test.py
index 5fbfe5b..e11e120 100755
--- a/src/antlion/unit_tests/acts_asserts_test.py
+++ b/src/antlion/unit_tests/acts_asserts_test.py
@@ -16,20 +16,19 @@
 
 import unittest
 
-from antlion import asserts
 from antlion import signals
 
+from mobly import asserts
+
 MSG_EXPECTED_EXCEPTION = "This is an expected exception."
 
 
 class ActsAssertsTest(unittest.TestCase):
-    """Verifies that asserts.xxx functions raise the correct test signals.
-    """
+    """Verifies that asserts.xxx functions raise the correct test signals."""
 
     def test_assert_false(self):
         asserts.assert_false(False, MSG_EXPECTED_EXCEPTION)
-        with self.assertRaisesRegexp(signals.TestFailure,
-                                     MSG_EXPECTED_EXCEPTION):
+        with self.assertRaisesRegex(signals.TestFailure, MSG_EXPECTED_EXCEPTION):
             asserts.assert_false(True, MSG_EXPECTED_EXCEPTION)
 
 
diff --git a/src/antlion/unit_tests/acts_base_class_test.py b/src/antlion/unit_tests/acts_base_class_test.py
index 18c989a..a0b9a1d 100755
--- a/src/antlion/unit_tests/acts_base_class_test.py
+++ b/src/antlion/unit_tests/acts_base_class_test.py
@@ -21,18 +21,18 @@
 import mock
 import mock_controller
 
-from antlion import asserts
 from antlion import base_test
 from antlion import signals
 
+from mobly import asserts
 from mobly import base_test as mobly_base_test
 import mobly.config_parser as mobly_config_parser
 
-MSG_EXPECTED_EXCEPTION = 'This is an expected exception.'
-MSG_EXPECTED_TEST_FAILURE = 'This is an expected test failure.'
-MSG_UNEXPECTED_EXCEPTION = 'Unexpected exception!'
+MSG_EXPECTED_EXCEPTION = "This is an expected exception."
+MSG_EXPECTED_TEST_FAILURE = "This is an expected test failure."
+MSG_UNEXPECTED_EXCEPTION = "Unexpected exception!"
 
-MOCK_EXTRA = {'key': 'value', 'answer_to_everything': 42}
+MOCK_EXTRA = {"key": "value", "answer_to_everything": 42}
 
 
 def never_call():
@@ -46,18 +46,18 @@
 class ActsBaseClassTest(unittest.TestCase):
     def setUp(self):
         self.tmp_dir = tempfile.mkdtemp()
-        self.tb_key = 'testbed_configs'
+        self.tb_key = "testbed_configs"
         self.test_run_config = mobly_config_parser.TestRunConfig()
-        self.test_run_config.testbed_name = 'SampleTestBed'
+        self.test_run_config.testbed_name = "SampleTestBed"
         self.test_run_config.controller_configs = {
             self.tb_key: {
-                'name': self.test_run_config.testbed_name,
+                "name": self.test_run_config.testbed_name,
             },
         }
         self.test_run_config.log_path = self.tmp_dir
-        self.test_run_config.user_params = {'some_param': 'hahaha'}
+        self.test_run_config.user_params = {"some_param": "hahaha"}
         self.test_run_config.summary_writer = mock.MagicMock()
-        self.mock_test_name = 'test_something'
+        self.mock_test_name = "test_something"
 
     def tearDown(self):
         shutil.rmtree(self.tmp_dir)
@@ -66,13 +66,14 @@
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
                 asserts.assert_true(
-                    self.current_test_name == 'test_func',
-                    'Got unexpected test name %s.' % self.current_test_name)
+                    self.current_test_name == "test_func",
+                    "Got unexpected test name %s." % self.current_test_name,
+                )
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertIsNone(actual_record.details)
         self.assertIsNone(actual_record.extras)
 
@@ -80,7 +81,7 @@
         class MockBaseTest(base_test.BaseTestClass):
             def __init__(self, controllers):
                 super(MockBaseTest, self).__init__(controllers)
-                self.tests = ('test_something', )
+                self.tests = ("test_something",)
 
             def test_something(self):
                 pass
@@ -92,14 +93,19 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_something')
+        self.assertEqual(actual_record.test_name, "test_something")
 
     def test_cli_test_selection_match_self_tests_list(self):
         class MockBaseTest(base_test.BaseTestClass):
             def __init__(self, controllers):
                 super(MockBaseTest, self).__init__(controllers)
-                self.tests = ('test_star1', 'test_star2', 'test_question_mark',
-                              'test_char_seq', 'test_no_match')
+                self.tests = (
+                    "test_star1",
+                    "test_star2",
+                    "test_question_mark",
+                    "test_char_seq",
+                    "test_no_match",
+                )
 
             def test_star1(self):
                 pass
@@ -120,15 +126,15 @@
 
         bt_cls = MockBaseTest(self.test_run_config)
         test_names = [
-            'test_st*r1', 'test_*2', 'test_?uestion_mark', 'test_c[fghi]ar_seq'
+            "test_st*r1",
+            "test_*2",
+            "test_?uestion_mark",
+            "test_c[fghi]ar_seq",
         ]
         bt_cls.run(test_names=test_names)
         passed_names = [p.test_name for p in bt_cls.results.passed]
         self.assertEqual(len(passed_names), len(test_names))
-        for test in [
-                'test_star1', 'test_star2', 'test_question_mark',
-                'test_char_seq'
-        ]:
+        for test in ["test_star1", "test_star2", "test_question_mark", "test_char_seq"]:
             self.assertIn(test, passed_names)
 
     def test_default_execution_of_all_tests(self):
@@ -144,7 +150,7 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_something')
+        self.assertEqual(actual_record.test_name, "test_something")
 
     def test_setup_class_fail_by_exception(self):
         call_check = mock.MagicMock()
@@ -158,22 +164,22 @@
                 never_call()
 
             def on_skip(self, test_name, begin_time):
-                call_check('haha')
+                call_check("haha")
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, 'test_something')
+        self.assertEqual(actual_record.test_name, "test_something")
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-        call_check.assert_called_once_with('haha')
+        call_check.assert_called_once_with("haha")
 
     def test_setup_test_fail_by_exception(self):
         class MockBaseTest(base_test.BaseTestClass):
@@ -185,18 +191,18 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_something'])
+        bt_cls.run(test_names=["test_something"])
         actual_record = bt_cls.results.error[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -210,18 +216,18 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_something'])
+        bt_cls.run(test_names=["test_something"])
         actual_record = bt_cls.results.failed[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 1,
+            "Failed": 1,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -235,19 +241,19 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_something'])
+        bt_cls.run(test_names=["test_something"])
         actual_record = bt_cls.results.failed[0]
-        expected_msg = 'Setup for %s failed.' % self.mock_test_name
+        expected_msg = "Setup for %s failed." % self.mock_test_name
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, expected_msg)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 1,
+            "Failed": 1,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -266,12 +272,12 @@
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -290,12 +296,12 @@
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -304,7 +310,7 @@
 
         class MockBaseTest(base_test.BaseTestClass):
             def teardown_test(self):
-                my_mock('teardown_test')
+                my_mock("teardown_test")
 
             def test_something(self):
                 pass
@@ -312,17 +318,17 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.passed[0]
-        my_mock.assert_called_once_with('teardown_test')
+        my_mock.assert_called_once_with("teardown_test")
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertIsNone(actual_record.details)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 1,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 1,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -334,7 +340,7 @@
                 raise Exception(MSG_EXPECTED_EXCEPTION)
 
             def teardown_test(self):
-                my_mock('teardown_test')
+                my_mock("teardown_test")
 
             def test_something(self):
                 pass
@@ -342,17 +348,17 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
-        my_mock.assert_called_once_with('teardown_test')
+        my_mock.assert_called_once_with("teardown_test")
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -361,7 +367,7 @@
 
         class MockBaseTest(base_test.BaseTestClass):
             def teardown_test(self):
-                my_mock('teardown_test')
+                my_mock("teardown_test")
 
             def test_something(self):
                 raise Exception(MSG_EXPECTED_EXCEPTION)
@@ -369,17 +375,17 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
-        my_mock.assert_called_once_with('teardown_test')
+        my_mock.assert_called_once_with("teardown_test")
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -388,7 +394,7 @@
 
         class MockBaseTest(base_test.BaseTestClass):
             def on_exception(self, test_name, begin_time):
-                my_mock('on_exception')
+                my_mock("on_exception")
 
             def teardown_test(self):
                 raise Exception(MSG_EXPECTED_EXCEPTION)
@@ -398,18 +404,18 @@
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
-        my_mock.assert_called_once_with('on_exception')
+        my_mock.assert_called_once_with("on_exception")
         actual_record = bt_cls.results.error[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -418,25 +424,25 @@
 
         class MockBaseTest(base_test.BaseTestClass):
             def on_fail(self, test_name, begin_time):
-                my_mock('on_fail')
+                my_mock("on_fail")
 
             def test_something(self):
                 asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
-        my_mock.assert_called_once_with('on_fail')
+        my_mock.assert_called_once_with("on_fail")
         actual_record = bt_cls.results.failed[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 1,
+            "Failed": 1,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -448,25 +454,25 @@
                 raise Exception(MSG_EXPECTED_EXCEPTION)
 
             def on_fail(self, test_name, begin_time):
-                my_mock('on_fail')
+                my_mock("on_fail")
 
             def test_something(self):
                 pass
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
-        my_mock.assert_called_once_with('on_fail')
+        my_mock.assert_called_once_with("on_fail")
         actual_record = bt_cls.results.error[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -478,26 +484,25 @@
                 return False
 
             def on_fail(self, test_name, begin_time):
-                my_mock('on_fail')
+                my_mock("on_fail")
 
             def test_something(self):
                 pass
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
-        my_mock.assert_called_once_with('on_fail')
+        my_mock.assert_called_once_with("on_fail")
         actual_record = bt_cls.results.failed[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details,
-                         'Setup for test_something failed.')
+        self.assertEqual(actual_record.details, "Setup for test_something failed.")
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 1,
+            "Failed": 1,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -513,22 +518,22 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
-        self.assertIn('_on_pass', actual_record.extra_errors)
+        self.assertIn("_on_pass", actual_record.extra_errors)
         self.assertEqual(actual_record.test_name, self.mock_test_name)
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
     def test_failure_in_procedure_functions_is_recorded(self):
-        expected_msg = 'Something failed in on_pass.'
+        expected_msg = "Something failed in on_pass."
 
         class MockBaseTest(base_test.BaseTestClass):
             def on_pass(self, test_name, begin_time):
@@ -544,12 +549,12 @@
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -559,51 +564,55 @@
                 asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
 
             def test_something(self):
-                raise Exception('Test Body Exception.')
+                raise Exception("Test Body Exception.")
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, 'Test Body Exception.')
+        self.assertEqual(actual_record.details, "Test Body Exception.")
         self.assertIsNone(actual_record.extras)
-        self.assertEqual(actual_record.extra_errors['teardown_test'].details,
-                         'This is an expected exception.')
+        self.assertEqual(
+            actual_record.extra_errors["teardown_test"].details,
+            "This is an expected exception.",
+        )
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
     def test_explicit_pass_but_teardown_test_raises_an_exception(self):
-        """Test record result should be marked as UNKNOWN as opposed to PASS.
-        """
+        """Test record result should be marked as UNKNOWN as opposed to PASS."""
+
         class MockBaseTest(base_test.BaseTestClass):
             def teardown_test(self):
                 asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
 
             def test_something(self):
-                asserts.explicit_pass('Test Passed!')
+                asserts.explicit_pass("Test Passed!")
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
         self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, 'Test Passed!')
+        self.assertEqual(actual_record.details, "Test Passed!")
         self.assertIsNone(actual_record.extras)
-        self.assertEqual(actual_record.extra_errors['teardown_test'].details,
-                         'This is an expected exception.')
+        self.assertEqual(
+            actual_record.extra_errors["teardown_test"].details,
+            "This is an expected exception.",
+        )
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -613,8 +622,7 @@
                 raise Exception(MSG_EXPECTED_EXCEPTION)
 
             def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION,
-                                      extras=MOCK_EXTRA)
+                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
@@ -623,12 +631,12 @@
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
         expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 1,
+            "Executed": 1,
+            "Failed": 0,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -648,12 +656,12 @@
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
         expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 1,
+            "Failed": 1,
+            "Passed": 0,
+            "Requested": 1,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -670,17 +678,16 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_1', 'test_2', 'test_3'])
-        self.assertEqual(bt_cls.results.passed[0].test_name, 'test_1')
-        self.assertEqual(bt_cls.results.failed[0].details,
-                         MSG_EXPECTED_EXCEPTION)
+        bt_cls.run(test_names=["test_1", "test_2", "test_3"])
+        self.assertEqual(bt_cls.results.passed[0].test_name, "test_1")
+        self.assertEqual(bt_cls.results.failed[0].details, MSG_EXPECTED_EXCEPTION)
         expected_summary = {
-            'Error': 0,
-            'Executed': 2,
-            'Failed': 1,
-            'Passed': 1,
-            'Requested': 3,
-            'Skipped': 0
+            "Error": 0,
+            "Executed": 2,
+            "Failed": 1,
+            "Passed": 1,
+            "Requested": 3,
+            "Skipped": 0,
         }
         self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
 
@@ -691,9 +698,9 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
 
@@ -704,24 +711,22 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
     def test_assert_true(self):
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
-                asserts.assert_true(False,
-                                    MSG_EXPECTED_EXCEPTION,
-                                    extras=MOCK_EXTRA)
+                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
@@ -733,7 +738,7 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertIsNone(actual_record.details)
         self.assertIsNone(actual_record.extras)
 
@@ -745,23 +750,22 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIn('1 != 2', actual_record.details)
+        self.assertEqual(actual_record.test_name, "test_func")
+        self.assertIn("1 != 2", actual_record.details)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
     def test_assert_equal_fail_with_msg(self):
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
-                asserts.assert_equal(1,
-                                     2,
-                                     msg=MSG_EXPECTED_EXCEPTION,
-                                     extras=MOCK_EXTRA)
+                asserts.assert_equal(
+                    1, 2, msg=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
+                )
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        expected_msg = '1 != 2 ' + MSG_EXPECTED_EXCEPTION
+        self.assertEqual(actual_record.test_name, "test_func")
+        expected_msg = "1 != 2 " + MSG_EXPECTED_EXCEPTION
         self.assertIn(expected_msg, actual_record.details)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
@@ -774,7 +778,7 @@
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertIsNone(actual_record.details)
         self.assertIsNone(actual_record.extras)
 
@@ -782,15 +786,14 @@
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
                 with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
+                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
+                ):
                     raise SomeError(MSG_EXPECTED_EXCEPTION)
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertIsNone(actual_record.details)
         self.assertIsNone(actual_record.extras)
 
@@ -798,35 +801,34 @@
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
                 with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
+                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
+                ):
                     pass
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, 'SomeError not raised')
+        self.assertEqual(actual_record.test_name, "test_func")
+        self.assertEqual(actual_record.details, "SomeError not raised")
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
     def test_assert_raises_fail_with_wrong_regex(self):
-        wrong_msg = 'ha'
+        wrong_msg = "ha"
 
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
                 with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
+                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
+                ):
                     raise SomeError(wrong_msg)
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        expected_details = ('"This is an expected exception." does not match '
-                            '"%s"') % wrong_msg
+        self.assertEqual(actual_record.test_name, "test_func")
+        expected_details = (
+            '"This is an expected exception." does not match ' '"%s"'
+        ) % wrong_msg
         self.assertEqual(actual_record.details, expected_details)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
@@ -834,29 +836,27 @@
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
                 with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
+                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
+                ):
                     raise AttributeError(MSG_UNEXPECTED_EXCEPTION)
 
         bt_cls = MockBaseTest(self.test_run_config)
         bt_cls.run()
         actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_UNEXPECTED_EXCEPTION)
         self.assertIsNone(actual_record.extras)
 
     def test_explicit_pass(self):
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION,
-                                      extras=MOCK_EXTRA)
+                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
@@ -866,9 +866,9 @@
                 pass
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertIsNone(actual_record.details)
         self.assertIsNone(actual_record.extras)
 
@@ -879,9 +879,9 @@
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.skipped[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
@@ -889,32 +889,31 @@
         class MockBaseTest(base_test.BaseTestClass):
             def test_func(self):
                 asserts.skip_if(False, MSG_UNEXPECTED_EXCEPTION)
-                asserts.skip_if(True,
-                                MSG_EXPECTED_EXCEPTION,
-                                extras=MOCK_EXTRA)
+                asserts.skip_if(True, MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
                 never_call()
 
         bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
+        bt_cls.run(test_names=["test_func"])
         actual_record = bt_cls.results.skipped[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
+        self.assertEqual(actual_record.test_name, "test_func")
         self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
         self.assertEqual(actual_record.extras, MOCK_EXTRA)
 
     def test_unpack_userparams_required(self):
         """Missing a required param should raise an error."""
-        required = ['some_param']
+        required = ["some_param"]
         bc = base_test.BaseTestClass(self.test_run_config)
         bc.unpack_userparams(required)
-        expected_value = self.test_run_config.user_params['some_param']
+        expected_value = self.test_run_config.user_params["some_param"]
         self.assertEqual(bc.some_param, expected_value)
 
     def test_unpack_userparams_required_missing(self):
         """Missing a required param should raise an error."""
-        required = ['something']
+        required = ["something"]
         bc = base_test.BaseTestClass(self.test_run_config)
-        expected_msg = ('Missing required user param "%s" in test '
-                        'configuration.') % required[0]
+        expected_msg = (
+            'Missing required user param "%s" in test ' "configuration."
+        ) % required[0]
         with self.assertRaises(mobly_base_test.Error, msg=expected_msg):
             bc.unpack_userparams(required)
 
@@ -922,10 +921,10 @@
         """If an optional param is specified, the value should be what's in the
         config.
         """
-        opt = ['some_param']
+        opt = ["some_param"]
         bc = base_test.BaseTestClass(self.test_run_config)
         bc.unpack_userparams(opt_param_names=opt)
-        expected_value = self.test_run_config.user_params['some_param']
+        expected_value = self.test_run_config.user_params["some_param"]
         self.assertEqual(bc.some_param, expected_value)
 
     def test_unpack_userparams_optional_with_default(self):
@@ -933,16 +932,16 @@
         param is not in the config, the value should be the default value.
         """
         bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(optional_thing='whatever')
-        self.assertEqual(bc.optional_thing, 'whatever')
+        bc.unpack_userparams(optional_thing="whatever")
+        self.assertEqual(bc.optional_thing, "whatever")
 
     def test_unpack_userparams_default_overwrite_by_optional_param_list(self):
         """If an optional param is specified in kwargs, and the param is in the
         config, the value should be the one in the config.
         """
         bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(some_param='whatever')
-        expected_value = self.test_run_config.user_params['some_param']
+        bc.unpack_userparams(some_param="whatever")
+        expected_value = self.test_run_config.user_params["some_param"]
         self.assertEqual(bc.some_param, expected_value)
 
     def test_unpack_userparams_default_overwrite_by_required_param_list(self):
@@ -952,33 +951,33 @@
         thrown.
         """
         bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(req_param_names=['a_kwarg_param'],
-                             a_kwarg_param='whatever')
-        self.assertEqual(bc.a_kwarg_param, 'whatever')
+        bc.unpack_userparams(
+            req_param_names=["a_kwarg_param"], a_kwarg_param="whatever"
+        )
+        self.assertEqual(bc.a_kwarg_param, "whatever")
 
     def test_unpack_userparams_optional_missing(self):
         """Missing an optional param should not raise an error."""
-        opt = ['something']
+        opt = ["something"]
         bc = base_test.BaseTestClass(self.test_run_config)
         bc.unpack_userparams(opt_param_names=opt)
 
     def test_unpack_userparams_basic(self):
         """Required and optional params are unpacked properly."""
-        required = ['something']
-        optional = ['something_else']
+        required = ["something"]
+        optional = ["something_else"]
         configs = self.test_run_config.copy()
-        configs.user_params['something'] = 42
-        configs.user_params['something_else'] = 53
+        configs.user_params["something"] = 42
+        configs.user_params["something_else"] = 53
         bc = base_test.BaseTestClass(configs)
-        bc.unpack_userparams(req_param_names=required,
-                             opt_param_names=optional)
+        bc.unpack_userparams(req_param_names=required, opt_param_names=optional)
         self.assertEqual(bc.something, 42)
         self.assertEqual(bc.something_else, 53)
 
     def test_unpack_userparams_default_overwrite(self):
-        default_arg_val = 'haha'
-        actual_arg_val = 'wawa'
-        arg_name = 'arg1'
+        default_arg_val = "haha"
+        actual_arg_val = "wawa"
+        arg_name = "arg1"
         configs = self.test_run_config.copy()
         configs.user_params[arg_name] = actual_arg_val
         bc = base_test.BaseTestClass(configs)
@@ -987,19 +986,19 @@
 
     def test_unpack_userparams_default_None(self):
         bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(arg1='haha')
-        self.assertEqual(bc.arg1, 'haha')
+        bc.unpack_userparams(arg1="haha")
+        self.assertEqual(bc.arg1, "haha")
 
     def test_register_controller_no_config(self):
         base_cls = base_test.BaseTestClass(self.test_run_config)
-        with self.assertRaisesRegexp(signals.ControllerError,
-                                     'No corresponding config found for'):
+        with self.assertRaisesRegexp(
+            signals.ControllerError, "No corresponding config found for"
+        ):
             base_cls.register_controller(mock_controller)
 
     def test_register_optional_controller_no_config(self):
         base_cls = base_test.BaseTestClass(self.test_run_config)
-        self.assertIsNone(
-            base_cls.register_controller(mock_controller, required=False))
+        self.assertIsNone(base_cls.register_controller(mock_controller, required=False))
 
     def test_register_controller_third_party_dup_register(self):
         """Verifies correctness of registration, internal tally of controllers
@@ -1009,17 +1008,18 @@
         mock_test_config = self.test_run_config.copy()
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
         mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            'magic1', 'magic2'
+            "magic1",
+            "magic2",
         ]
         base_cls = base_test.BaseTestClass(mock_test_config)
         base_cls.register_controller(mock_controller)
-        registered_name = 'mock_controller'
+        registered_name = "mock_controller"
         controller_objects = base_cls._controller_manager._controller_objects
         self.assertTrue(registered_name in controller_objects)
         mock_ctrlrs = controller_objects[registered_name]
-        self.assertEqual(mock_ctrlrs[0].magic, 'magic1')
-        self.assertEqual(mock_ctrlrs[1].magic, 'magic2')
-        expected_msg = 'Controller module .* has already been registered.'
+        self.assertEqual(mock_ctrlrs[0].magic, "magic1")
+        self.assertEqual(mock_ctrlrs[1].magic, "magic2")
+        expected_msg = "Controller module .* has already been registered."
         with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
             base_cls.register_controller(mock_controller)
 
@@ -1031,11 +1031,12 @@
         mock_test_config = self.test_run_config.copy()
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
         mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            'magic1', 'magic2'
+            "magic1",
+            "magic2",
         ]
         base_cls = base_test.BaseTestClass(mock_test_config)
         base_cls.register_controller(mock_controller, required=False)
-        expected_msg = 'Controller module .* has already been registered.'
+        expected_msg = "Controller module .* has already been registered."
         with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
             base_cls.register_controller(mock_controller, required=False)
 
@@ -1045,88 +1046,83 @@
         """
         mock_test_config = self.test_run_config.copy()
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_ref_name = 'haha'
-        setattr(mock_controller, 'ACTS_CONTROLLER_REFERENCE_NAME',
-                mock_ref_name)
+        mock_ref_name = "haha"
+        setattr(mock_controller, "ACTS_CONTROLLER_REFERENCE_NAME", mock_ref_name)
         try:
             mock_ctrlr_ref_name = mock_controller.ACTS_CONTROLLER_REFERENCE_NAME
             mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-                'magic1', 'magic2'
+                "magic1",
+                "magic2",
             ]
             base_cls = base_test.BaseTestClass(mock_test_config)
             base_cls.register_controller(mock_controller, builtin=True)
             self.assertTrue(hasattr(base_cls, mock_ref_name))
-            self.assertTrue(mock_controller.__name__ in
-                            base_cls._controller_manager._controller_objects)
+            self.assertTrue(
+                mock_controller.__name__
+                in base_cls._controller_manager._controller_objects
+            )
             mock_ctrlrs = getattr(base_cls, mock_ctrlr_ref_name)
-            self.assertEqual(mock_ctrlrs[0].magic, 'magic1')
-            self.assertEqual(mock_ctrlrs[1].magic, 'magic2')
-            expected_msg = 'Controller module .* has already been registered.'
-            with self.assertRaisesRegexp(signals.ControllerError,
-                                         expected_msg):
+            self.assertEqual(mock_ctrlrs[0].magic, "magic1")
+            self.assertEqual(mock_ctrlrs[1].magic, "magic2")
+            expected_msg = "Controller module .* has already been registered."
+            with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
                 base_cls.register_controller(mock_controller, builtin=True)
         finally:
-            delattr(mock_controller, 'ACTS_CONTROLLER_REFERENCE_NAME')
+            delattr(mock_controller, "ACTS_CONTROLLER_REFERENCE_NAME")
 
     def test_register_controller_no_get_info(self):
         mock_test_config = self.test_run_config.copy()
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_ref_name = 'haha'
-        get_info = getattr(mock_controller, 'get_info')
-        delattr(mock_controller, 'get_info')
+        mock_ref_name = "haha"
+        get_info = getattr(mock_controller, "get_info")
+        delattr(mock_controller, "get_info")
         try:
             mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-                'magic1', 'magic2'
+                "magic1",
+                "magic2",
             ]
             base_cls = base_test.BaseTestClass(mock_test_config)
             base_cls.register_controller(mock_controller)
             self.assertEqual(base_cls.results.controller_info, [])
         finally:
-            setattr(mock_controller, 'get_info', get_info)
+            setattr(mock_controller, "get_info", get_info)
 
     def test_register_controller_return_value(self):
         mock_test_config = self.test_run_config.copy()
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
         mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            'magic1', 'magic2'
+            "magic1",
+            "magic2",
         ]
         base_cls = base_test.BaseTestClass(mock_test_config)
         magic_devices = base_cls.register_controller(mock_controller)
-        self.assertEqual(magic_devices[0].magic, 'magic1')
-        self.assertEqual(magic_devices[1].magic, 'magic2')
+        self.assertEqual(magic_devices[0].magic, "magic1")
+        self.assertEqual(magic_devices[1].magic, "magic2")
 
     def test_handle_file_user_params_does_not_overwrite_existing_params(self):
         test_run_config = self.test_run_config.copy()
         test_run_config.user_params = {
-            'foo': ['good_value'],
-            'local_files': {
-                'foo': ['bad_value']
-            }
+            "foo": ["good_value"],
+            "local_files": {"foo": ["bad_value"]},
         }
         test = base_test.BaseTestClass(test_run_config)
 
-        self.assertEqual(test.user_params['foo'], ['good_value'])
+        self.assertEqual(test.user_params["foo"], ["good_value"])
 
     def test_handle_file_user_params_dumps_files_dict(self):
         test_run_config = self.test_run_config.copy()
-        test_run_config.user_params = {
-            'my_files': {
-                'foo': ['good_value']
-            }
-        }
+        test_run_config.user_params = {"my_files": {"foo": ["good_value"]}}
         test = base_test.BaseTestClass(test_run_config)
 
-        self.assertEqual(test.user_params['foo'], ['good_value'])
+        self.assertEqual(test.user_params["foo"], ["good_value"])
 
     def test_handle_file_user_params_is_called_in_init(self):
         test_run_config = self.test_run_config.copy()
-        test_run_config.user_params['files'] = {
-            'file_a': ['/some/path']
-        }
+        test_run_config.user_params["files"] = {"file_a": ["/some/path"]}
         test = base_test.BaseTestClass(test_run_config)
 
-        self.assertEqual(test.user_params['file_a'], ['/some/path'])
+        self.assertEqual(test.user_params["file_a"], ["/some/path"])
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/acts_context_test.py b/src/antlion/unit_tests/acts_context_test.py
index 9f634c2..5fe4f34 100755
--- a/src/antlion/unit_tests/acts_context_test.py
+++ b/src/antlion/unit_tests/acts_context_test.py
@@ -36,15 +36,14 @@
 from mock import Mock
 from mock import patch
 
-
-LOGGING = 'antlion.context.logging'
+LOGGING = "antlion.context.logging"
 
 
 def reset_context():
     context._contexts = [RootContext()]
 
 
-TEST_CASE = 'test_case_name'
+TEST_CASE = "test_case_name"
 
 
 class TestClass:
@@ -142,32 +141,32 @@
         context = TestContext()
         mock_path = Mock()
 
-        context.add_base_output_path('basepath', mock_path)
+        context.add_base_output_path("basepath", mock_path)
 
-        self.assertEqual(context.get_base_output_path('basepath'), mock_path)
+        self.assertEqual(context.get_base_output_path("basepath"), mock_path)
 
     def test_get_subcontext_returns_empty_string_by_default(self):
         context = TestContext()
 
-        self.assertEqual(context.get_subcontext(), '')
+        self.assertEqual(context.get_subcontext(), "")
 
     def test_add_subcontext_sets_correct_path(self):
         context = TestContext()
         mock_path = Mock()
 
-        context.add_subcontext('subcontext', mock_path)
+        context.add_subcontext("subcontext", mock_path)
 
-        self.assertEqual(context.get_subcontext('subcontext'), mock_path)
+        self.assertEqual(context.get_subcontext("subcontext"), mock_path)
 
     @patch(LOGGING)
-    @patch('os.makedirs')
+    @patch("os.makedirs")
     def test_get_full_output_path_returns_correct_path(self, *_):
         context = TestClassContext(TestClass())
-        context.add_base_output_path('foo', 'base/path')
-        context.add_subcontext('foo', 'subcontext')
+        context.add_base_output_path("foo", "base/path")
+        context.add_subcontext("foo", "subcontext")
 
-        full_path = 'base/path/TestClass/subcontext'
-        self.assertEqual(context.get_full_output_path('foo'), full_path)
+        full_path = "base/path/TestClass/subcontext"
+        self.assertEqual(context.get_full_output_path("foo"), full_path)
 
     def test_identifier_not_implemented(self):
         context = TestContext()
@@ -187,6 +186,7 @@
     def test_get_class_name(self):
         class TestClass:
             pass
+
         test_class = TestClass()
         context = TestClassContext(test_class)
 
@@ -195,6 +195,7 @@
     def test_context_dir_is_class_name(self):
         class TestClass:
             pass
+
         test_class = TestClass()
         context = TestClassContext(test_class)
 
@@ -203,6 +204,7 @@
     def test_identifier_is_class_name(self):
         class TestClass:
             pass
+
         test_class = TestClass()
         context = TestClassContext(test_class)
 
@@ -231,16 +233,16 @@
         test_class = TestClass()
         context = TestCaseContext(test_class, TEST_CASE)
 
-        context_dir = TestClass.__name__ + '/' + TEST_CASE
+        context_dir = TestClass.__name__ + "/" + TEST_CASE
         self.assertEqual(context._get_default_context_dir(), context_dir)
 
     def test_identifier_is_class_and_test_case_name(self):
         test_class = TestClass()
         context = TestCaseContext(test_class, TEST_CASE)
 
-        identifier = TestClass.__name__ + '.' + TEST_CASE
+        identifier = TestClass.__name__ + "." + TEST_CASE
         self.assertEqual(context.identifier, identifier)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/acts_error_test.py b/src/antlion/unit_tests/acts_error_test.py
index 737000d..2431bd3 100755
--- a/src/antlion/unit_tests/acts_error_test.py
+++ b/src/antlion/unit_tests/acts_error_test.py
@@ -19,28 +19,27 @@
 
 
 class ActsErrorTest(unittest.TestCase):
-
     def test_assert_key_pulled_from_acts_error_code(self):
         e = error.ActsError()
         self.assertEqual(e.error_code, 100)
 
     def test_assert_description_pulled_from_docstring(self):
         e = error.ActsError()
-        self.assertEqual(e.error_doc, 'Base Acts Error')
+        self.assertEqual(e.error_doc, "Base Acts Error")
 
     def test_error_without_args(self):
         e = error.ActsError()
-        self.assertEqual(e.details, '')
+        self.assertEqual(e.details, "")
 
     def test_error_with_args(self):
-        args = ('hello', )
+        args = ("hello",)
         e = error.ActsError(*args)
-        self.assertEqual(e.details, 'hello')
+        self.assertEqual(e.details, "hello")
 
     def test_error_with_kwargs(self):
-        e = error.ActsError(key='value')
-        self.assertIn(('key', 'value'), e.extras.items())
+        e = error.ActsError(key="value")
+        self.assertIn(("key", "value"), e.extras.items())
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/acts_host_utils_test.py b/src/antlion/unit_tests/acts_host_utils_test.py
index 17e389c..cc77f4f 100755
--- a/src/antlion/unit_tests/acts_host_utils_test.py
+++ b/src/antlion/unit_tests/acts_host_utils_test.py
@@ -27,7 +27,7 @@
 
     def test_detects_udp_port_in_use(self):
         test_s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        test_s.bind(('localhost', 0))
+        test_s.bind(("localhost", 0))
         port = test_s.getsockname()[1]
         try:
             self.assertFalse(host_utils.is_port_available(port))
@@ -36,7 +36,7 @@
 
     def test_detects_tcp_port_in_use(self):
         test_s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        test_s.bind(('localhost', 0))
+        test_s.bind(("localhost", 0))
         port = test_s.getsockname()[1]
         try:
             self.assertFalse(host_utils.is_port_available(port))
diff --git a/src/antlion/unit_tests/acts_import_unit_test.py b/src/antlion/unit_tests/acts_import_unit_test.py
index adbddde..55a340f 100755
--- a/src/antlion/unit_tests/acts_import_unit_test.py
+++ b/src/antlion/unit_tests/acts_import_unit_test.py
@@ -14,50 +14,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import importlib.machinery
 import os
 import re
-import sys
+import unittest
 import uuid
 
-if sys.version_info < (3, ):
-    import warnings
 
-    with warnings.catch_warnings():
-        warnings.filterwarnings('ignore', category=PendingDeprecationWarning)
-        import imp
-
-    import importlib
-    import unittest2 as unittest
-
-    def import_module(name, path):
-        return imp.load_source(name, path)
-
-    def import_acts():
-        return importlib.import_module('antlion')
-else:
-    import importlib.machinery
-    import unittest
-
-    def import_module(name, path):
-        return importlib.machinery.SourceFileLoader(name, path).load_module()
-
-    def import_acts():
-        return importlib.import_module('antlion')
+def import_module(name, path):
+    return importlib.machinery.SourceFileLoader(name, path).load_module()
 
 
-PY_FILE_REGEX = re.compile('.+\.py$')
+def import_acts():
+    return importlib.import_module("antlion")
+
+
+PY_FILE_REGEX = re.compile(".+\.py$")
 
 DENYLIST = [
-    'antlion/controllers/rohdeschwarz_lib/contest.py',
-    'antlion/controllers/native.py',
-    'antlion/controllers/native_android_device.py',
-    'antlion/controllers/packet_sender.py',
-    'antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py'
+    "antlion/controllers/packet_sender.py",
 ]
 
-DENYLIST_DIRECTORIES = [
-    'antlion/controllers/buds_lib'
-]
+DENYLIST_DIRECTORIES = []
 
 
 class ActsImportUnitTest(unittest.TestCase):
@@ -73,7 +51,7 @@
     def test_import_framework_successful(self):
         """Dynamically test all imports from the framework."""
         acts = import_acts()
-        if hasattr(acts, '__path__') and len(antlion.__path__) > 0:
+        if hasattr(acts, "__path__") and len(antlion.__path__) > 0:
             acts_path = antlion.__path__[0]
         else:
             acts_path = os.path.dirname(antlion.__file__)
@@ -81,19 +59,19 @@
         for root, _, files in os.walk(acts_path):
             for f in files:
                 full_path = os.path.join(root, f)
-                if (any(full_path.endswith(e) for e in DENYLIST)
-                        or any(e in full_path
-                               for e in DENYLIST_DIRECTORIES)):
+                if any(full_path.endswith(e) for e in DENYLIST) or any(
+                    e in full_path for e in DENYLIST_DIRECTORIES
+                ):
                     continue
 
                 path = os.path.relpath(os.path.join(root, f), os.getcwd())
 
                 if PY_FILE_REGEX.match(full_path):
-                    with self.subTest(msg='import %s' % path):
+                    with self.subTest(msg="import %s" % path):
                         fake_module_name = str(uuid.uuid4())
                         module = import_module(fake_module_name, path)
                         self.assertIsNotNone(module)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/acts_job_test.py b/src/antlion/unit_tests/acts_job_test.py
index 12ff68f..a900d73 100755
--- a/src/antlion/unit_tests/acts_job_test.py
+++ b/src/antlion/unit_tests/acts_job_test.py
@@ -21,7 +21,7 @@
 
 from antlion.libs.proc import job
 
-if os.name == 'posix' and sys.version_info[0] < 3:
+if os.name == "posix" and sys.version_info[0] < 3:
     import subprocess32 as subprocess
 else:
     import subprocess
@@ -30,22 +30,15 @@
 class FakePopen(object):
     """A fake version of the object returned from subprocess.Popen()."""
 
-    def __init__(self,
-                 stdout=None,
-                 stderr=None,
-                 returncode=0,
-                 will_timeout=False):
+    def __init__(self, stdout=None, stderr=None, returncode=0, will_timeout=False):
         self.returncode = returncode
-        self._stdout = bytes(stdout,
-                             'utf-8') if stdout is not None else bytes()
-        self._stderr = bytes(stderr,
-                             'utf-8') if stderr is not None else bytes()
+        self._stdout = bytes(stdout, "utf-8") if stdout is not None else bytes()
+        self._stderr = bytes(stderr, "utf-8") if stderr is not None else bytes()
         self._will_timeout = will_timeout
 
     def communicate(self, timeout=None):
         if self._will_timeout:
-            raise subprocess.TimeoutExpired(
-                -1, 'Timed out according to test logic')
+            raise subprocess.TimeoutExpired(-1, "Timed out according to test logic")
         return self._stdout, self._stderr
 
     def kill(self):
@@ -57,65 +50,70 @@
 
 class JobTestCases(unittest.TestCase):
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stdout='TEST\n'))
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stdout="TEST\n"),
+    )
     def test_run_success(self, popen):
         """Test running a simple shell command."""
-        result = job.run('echo TEST')
-        self.assertTrue(result.stdout.startswith('TEST'))
+        result = job.run("echo TEST")
+        self.assertTrue(result.stdout.startswith("TEST"))
 
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stderr='TEST\n'))
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stderr="TEST\n"),
+    )
     def test_run_stderr(self, popen):
         """Test that we can read process stderr."""
-        result = job.run('echo TEST 1>&2')
+        result = job.run("echo TEST 1>&2")
         self.assertEqual(len(result.stdout), 0)
-        self.assertTrue(result.stderr.startswith('TEST'))
+        self.assertTrue(result.stderr.startswith("TEST"))
         self.assertFalse(result.stdout)
 
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(returncode=1))
+        "antlion.libs.proc.job.subprocess.Popen", return_value=FakePopen(returncode=1)
+    )
     def test_run_error(self, popen):
         """Test that we raise on non-zero exit statuses."""
-        self.assertRaises(job.Error, job.run, 'exit 1')
+        self.assertRaises(job.Error, job.run, "exit 1")
 
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(returncode=1))
+        "antlion.libs.proc.job.subprocess.Popen", return_value=FakePopen(returncode=1)
+    )
     def test_run_with_ignored_error(self, popen):
         """Test that we can ignore exit status on request."""
-        result = job.run('exit 1', ignore_status=True)
+        result = job.run("exit 1", ignore_status=True)
         self.assertEqual(result.exit_status, 1)
 
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(will_timeout=True))
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(will_timeout=True),
+    )
     def test_run_timeout(self, popen):
         """Test that we correctly implement command timeouts."""
-        self.assertRaises(job.Error, job.run, 'sleep 5', timeout=0.1)
+        self.assertRaises(job.Error, job.run, "sleep 5", timeout=0.1)
 
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stdout='TEST\n'))
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stdout="TEST\n"),
+    )
     def test_run_no_shell(self, popen):
         """Test that we handle running without a wrapping shell."""
-        result = job.run(['echo', 'TEST'])
-        self.assertTrue(result.stdout.startswith('TEST'))
+        result = job.run(["echo", "TEST"])
+        self.assertTrue(result.stdout.startswith("TEST"))
 
     @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stdout='TEST\n'))
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stdout="TEST\n"),
+    )
     def test_job_env(self, popen):
         """Test that we can set environment variables correctly."""
-        test_env = {'MYTESTVAR': '20'}
-        result = job.run('printenv', env=test_env.copy())
+        test_env = {"MYTESTVAR": "20"}
+        result = job.run("printenv", env=test_env.copy())
         popen.assert_called_once()
         _, kwargs = popen.call_args
-        self.assertTrue('env' in kwargs)
-        self.assertEqual(kwargs['env'], test_env)
+        self.assertTrue("env" in kwargs)
+        self.assertEqual(kwargs["env"], test_env)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
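For reference, the FakePopen pattern exercised above can be reproduced outside antlion with only the standard library: patch subprocess.Popen so the wrapper under test never spawns a real process. The run helper and FakePopen class below are illustrative stand-ins, not the antlion job API.

import subprocess
import unittest
from unittest import mock


def run(command):
    # Minimal stand-in for a job.run-style wrapper (illustration only).
    proc = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    stdout, _ = proc.communicate()
    return stdout


class FakePopen:
    # Canned replacement so no real process is started.
    def communicate(self, timeout=None):
        return b"TEST\n", b""


class RunTest(unittest.TestCase):
    @mock.patch("subprocess.Popen", return_value=FakePopen())
    def test_run(self, popen):
        self.assertTrue(run("echo TEST").startswith(b"TEST"))
        popen.assert_called_once()


if __name__ == "__main__":
    unittest.main()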
diff --git a/src/antlion/unit_tests/acts_logger_test.py b/src/antlion/unit_tests/acts_logger_test.py
index 2b6eeed..f46e79a 100755
--- a/src/antlion/unit_tests/acts_logger_test.py
+++ b/src/antlion/unit_tests/acts_logger_test.py
@@ -22,11 +22,10 @@
 
 
 class ActsLoggerTest(unittest.TestCase):
-    """Verifies code in antlion.logger module.
-    """
+    """Verifies code in antlion.logger module."""
 
     def test_epoch_to_log_line_timestamp(self):
-        os.environ['TZ'] = 'US/Pacific'
+        os.environ["TZ"] = "US/Pacific"
         time.tzset()
         actual_stamp = logger.epoch_to_log_line_timestamp(1469134262116)
         self.assertEqual("2016-07-21 13:51:02.116", actual_stamp)
diff --git a/src/antlion/unit_tests/acts_records_test.py b/src/antlion/unit_tests/acts_records_test.py
index 8280f27..7816c61 100755
--- a/src/antlion/unit_tests/acts_records_test.py
+++ b/src/antlion/unit_tests/acts_records_test.py
@@ -23,8 +23,7 @@
 
 
 class ActsRecordsTest(unittest.TestCase):
-    """This test class tests the implementation of classes in antlion.records.
-    """
+    """This test class tests the implementation of classes in antlion.records."""
 
     def setUp(self):
         self.tn = "test_name"
@@ -50,8 +49,7 @@
         d[records.TestResultEnums.RECORD_EXTRAS] = extras
         d[records.TestResultEnums.RECORD_BEGIN_TIME] = record.begin_time
         d[records.TestResultEnums.RECORD_END_TIME] = record.end_time
-        d[records.TestResultEnums.
-          RECORD_LOG_BEGIN_TIME] = record.log_begin_time
+        d[records.TestResultEnums.RECORD_LOG_BEGIN_TIME] = record.log_begin_time
         d[records.TestResultEnums.RECORD_LOG_END_TIME] = record.log_end_time
         d[records.TestResultEnums.RECORD_UID] = None
         d[records.TestResultEnums.RECORD_CLASS] = None
@@ -63,8 +61,9 @@
         # results.
         self.assertTrue(str(record), "str of the record should not be empty.")
         self.assertTrue(repr(record), "the record's repr shouldn't be empty.")
-        self.assertTrue(record.json_str(), ("json str of the record should "
-                                            "not be empty."))
+        self.assertTrue(
+            record.json_str(), "json str of the record should not be empty."
+        )
 
     """ Begin of Tests """
 
@@ -76,7 +75,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_PASS,
             details=None,
-            extras=None)
+            extras=None,
+        )
 
     def test_result_record_pass_with_float_extra(self):
         record = records.TestResultRecord(self.tn)
@@ -87,7 +87,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_PASS,
             details=self.details,
-            extras=self.float_extra)
+            extras=self.float_extra,
+        )
 
     def test_result_record_pass_with_json_extra(self):
         record = records.TestResultRecord(self.tn)
@@ -98,7 +99,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_PASS,
             details=self.details,
-            extras=self.json_extra)
+            extras=self.json_extra,
+        )
 
     def test_result_record_fail_none(self):
         record = records.TestResultRecord(self.tn)
@@ -108,7 +110,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_FAIL,
             details=None,
-            extras=None)
+            extras=None,
+        )
 
     def test_result_record_fail_with_float_extra(self):
         record = records.TestResultRecord(self.tn)
@@ -119,7 +122,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_FAIL,
             details=self.details,
-            extras=self.float_extra)
+            extras=self.float_extra,
+        )
 
     def test_result_record_fail_with_json_extra(self):
         record = records.TestResultRecord(self.tn)
@@ -130,7 +134,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_FAIL,
             details=self.details,
-            extras=self.json_extra)
+            extras=self.json_extra,
+        )
 
     def test_result_record_skip_none(self):
         record = records.TestResultRecord(self.tn)
@@ -140,7 +145,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_SKIP,
             details=None,
-            extras=None)
+            extras=None,
+        )
 
     def test_result_record_skip_with_float_extra(self):
         record = records.TestResultRecord(self.tn)
@@ -151,7 +157,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_SKIP,
             details=self.details,
-            extras=self.float_extra)
+            extras=self.float_extra,
+        )
 
     def test_result_record_skip_with_json_extra(self):
         record = records.TestResultRecord(self.tn)
@@ -162,7 +169,8 @@
             record=record,
             result=records.TestResultEnums.TEST_RESULT_SKIP,
             details=self.details,
-            extras=self.json_extra)
+            extras=self.json_extra,
+        )
 
     def test_result_add_operator_success(self):
         record1 = records.TestResultRecord(self.tn)
@@ -171,7 +179,7 @@
         record1.test_pass(s)
         tr1 = records.TestResult()
         tr1.add_record(record1)
-        device1 = ControllerInfoRecord('TestClass', 'MockDevice', 'device1')
+        device1 = ControllerInfoRecord("TestClass", "MockDevice", "device1")
         tr1.add_controller_info_record(device1)
         record2 = records.TestResultRecord(self.tn)
         record2.test_begin()
@@ -179,7 +187,7 @@
         record2.test_pass(s)
         tr2 = records.TestResult()
         tr2.add_record(record2)
-        device2 = ControllerInfoRecord('TestClass', 'MockDevice', 'device2')
+        device2 = ControllerInfoRecord("TestClass", "MockDevice", "device2")
         tr2.add_controller_info_record(device2)
         tr2 += tr1
         self.assertTrue(tr2.passed, [tr1, tr2])
diff --git a/src/antlion/unit_tests/acts_relay_controller_test.py b/src/antlion/unit_tests/acts_relay_controller_test.py
deleted file mode 100755
index f1d066d..0000000
--- a/src/antlion/unit_tests/acts_relay_controller_test.py
+++ /dev/null
@@ -1,826 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-import shutil
-import tempfile
-import unittest
-
-import antlion.controllers.relay_lib.fugu_remote as fugu_remote
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.errors import RelayDeviceConnectionError
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.relay import Relay
-from antlion.controllers.relay_lib.relay import RelayDict
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-from antlion.controllers.relay_lib.relay_device import RelayDevice
-from antlion.controllers.relay_lib.relay_rig import RelayRig
-from antlion.controllers.relay_lib.sain_smart_board import SainSmartBoard
-from mock import patch
-
-
-class MockBoard(RelayBoard):
-    def __init__(self, config):
-        self.relay_states = dict()
-        self.relay_previous_states = dict()
-        RelayBoard.__init__(self, config)
-
-    def get_relay_position_list(self):
-        return [0, 1]
-
-    def get_relay_status(self, relay_position):
-        if relay_position not in self.relay_states:
-            self.relay_states[relay_position] = RelayState.NO
-            self.relay_previous_states[relay_position] = RelayState.NO
-        return self.relay_states[relay_position]
-
-    def set(self, relay_position, state):
-        self.relay_previous_states[relay_position] = self.get_relay_status(
-            relay_position)
-        self.relay_states[relay_position] = state
-        return state
-
-
-class ActsRelayTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.config = {
-            'name': 'MockBoard',
-            'relays': [{
-                'name': 'Relay',
-                'relay_pos': 0
-            }]
-        }
-        self.board = MockBoard(self.config)
-        self.relay = Relay(self.board, 'Relay')
-        self.board.set(self.relay.position, RelayState.NO)
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_turn_on_from_off(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.set_nc()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_turn_on_from_on(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.set_nc()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_turn_off_from_on(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.set_no()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_turn_off_from_off(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.set_no()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_toggle_off_to_on(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.toggle()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_toggle_on_to_off(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.toggle()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_set_on(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.set(RelayState.NC)
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_set_off(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.set(RelayState.NO)
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_set_foo(self):
-        with self.assertRaises(ValueError):
-            self.relay.set('FOO')
-
-    def test_set_nc_for(self):
-        # Here we set twice so relay_previous_state will also be OFF
-        self.board.set(self.relay.position, RelayState.NO)
-        self.board.set(self.relay.position, RelayState.NO)
-
-        self.relay.set_nc_for(0)
-
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-        self.assertEqual(self.board.relay_previous_states[self.relay.position],
-                         RelayState.NC)
-
-    def test_set_no_for(self):
-        # Here we set twice so relay_previous_state will also be OFF
-        self.board.set(self.relay.position, RelayState.NC)
-        self.board.set(self.relay.position, RelayState.NC)
-
-        self.relay.set_no_for(0)
-
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-        self.assertEqual(self.board.relay_previous_states[self.relay.position],
-                         RelayState.NO)
-
-    def test_get_status_on(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.assertEqual(self.relay.get_status(), RelayState.NC)
-
-    def test_get_status_off(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.assertEqual(self.relay.get_status(), RelayState.NO)
-
-    def test_clean_up_default_on(self):
-        new_relay = Relay(self.board, 0)
-        new_relay._original_state = RelayState.NO
-        self.board.set(new_relay.position, RelayState.NO)
-        new_relay.clean_up()
-
-        self.assertEqual(self.board.get_relay_status(new_relay.position),
-                         RelayState.NO)
-
-    def test_clean_up_default_off(self):
-        new_relay = Relay(self.board, 0)
-        new_relay._original_state = RelayState.NO
-        self.board.set(new_relay.position, RelayState.NC)
-        new_relay.clean_up()
-
-        self.assertEqual(self.board.get_relay_status(new_relay.position),
-                         RelayState.NO)
-
-    def test_clean_up_original_state_none(self):
-        val = 'STAYS_THE_SAME'
-        new_relay = Relay(self.board, 0)
-        # _original_state is none by default
-        # The line below sets the dict to an impossible value.
-        self.board.set(new_relay.position, val)
-        new_relay.clean_up()
-        # If the impossible value is cleared, then the test should fail.
-        self.assertEqual(self.board.get_relay_status(new_relay.position), val)
-
-
-class ActsSainSmartBoardTest(unittest.TestCase):
-    STATUS_MSG = ('<small><a href="{}"></a>'
-                  '</small><a href="{}/{}TUX">{}TUX</a><p>')
-
-    RELAY_ON_PAGE_CONTENTS = 'relay_on page'
-    RELAY_OFF_PAGE_CONTENTS = 'relay_off page'
-
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.test_dir = 'file://' + tempfile.mkdtemp() + '/'
-
-        # Creates the files used for testing
-        self._set_status_page('0000000000000000')
-        with open(self.test_dir[7:] + '00', 'w+') as file:
-            file.write(self.RELAY_OFF_PAGE_CONTENTS)
-        with open(self.test_dir[7:] + '01', 'w+') as file:
-            file.write(self.RELAY_ON_PAGE_CONTENTS)
-
-        self.config = ({
-            'name':
-            'SSBoard',
-            'base_url':
-            self.test_dir,
-            'relays': [{
-                'name': '0',
-                'relay_pos': 0
-            }, {
-                'name': '1',
-                'relay_pos': 1
-            }, {
-                'name': '2',
-                'relay_pos': 7
-            }]
-        })
-        self.ss_board = SainSmartBoard(self.config)
-        self.r0 = Relay(self.ss_board, 0)
-        self.r1 = Relay(self.ss_board, 1)
-        self.r7 = Relay(self.ss_board, 7)
-
-    def tearDown(self):
-        shutil.rmtree(self.test_dir[7:])
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_get_url_code(self):
-        result = self.ss_board._get_relay_url_code(self.r0.position,
-                                                   RelayState.NO)
-        self.assertEqual(result, '00')
-
-        result = self.ss_board._get_relay_url_code(self.r0.position,
-                                                   RelayState.NC)
-        self.assertEqual(result, '01')
-
-        result = self.ss_board._get_relay_url_code(self.r7.position,
-                                                   RelayState.NO)
-        self.assertEqual(result, '14')
-
-        result = self.ss_board._get_relay_url_code(self.r7.position,
-                                                   RelayState.NC)
-        self.assertEqual(result, '15')
-
-    def test_load_page_status(self):
-        self._set_status_page('0000111100001111')
-        result = self.ss_board._load_page(SainSmartBoard.HIDDEN_STATUS_PAGE)
-        self.assertTrue(
-            result.endswith(
-                '0000111100001111TUX">0000111100001111TUX</a><p>'))
-
-    def test_load_page_relay(self):
-        result = self.ss_board._load_page('00')
-        self.assertEqual(result, self.RELAY_OFF_PAGE_CONTENTS)
-
-        result = self.ss_board._load_page('01')
-        self.assertEqual(result, self.RELAY_ON_PAGE_CONTENTS)
-
-    def test_load_page_no_connection(self):
-        with self.assertRaises(RelayDeviceConnectionError):
-            self.ss_board._load_page('**')
-
-    def _set_status_page(self, status_16_chars):
-        with open(self.test_dir[7:] + '99', 'w+') as status_file:
-            status_file.write(
-                self.STATUS_MSG.format(self.test_dir[:-1], self.test_dir[:-1],
-                                       status_16_chars, status_16_chars))
-
-    def _test_sync_status_dict(self, status_16_chars):
-        self._set_status_page(status_16_chars)
-        expected_dict = dict()
-
-        for index, char in enumerate(status_16_chars):
-            expected_dict[
-                index] = RelayState.NC if char == '1' else RelayState.NO
-
-        self.ss_board._sync_status_dict()
-        self.assertDictEqual(expected_dict, self.ss_board.status_dict)
-
-    def test_sync_status_dict(self):
-        self._test_sync_status_dict('0000111100001111')
-        self._test_sync_status_dict('0000000000000000')
-        self._test_sync_status_dict('0101010101010101')
-        self._test_sync_status_dict('1010101010101010')
-        self._test_sync_status_dict('1111111111111111')
-
-    def test_get_relay_status_status_dict_none(self):
-        self._set_status_page('1111111111111111')
-        self.ss_board.status_dict = None
-        self.assertEqual(self.ss_board.get_relay_status(self.r0.position),
-                         RelayState.NC)
-
-    def test_get_relay_status_status_dict_on(self):
-        self.r0.set(RelayState.NC)
-        self.assertEqual(self.ss_board.get_relay_status(self.r0.position),
-                         RelayState.NC)
-
-    def test_get_relay_status_status_dict_off(self):
-        self.r0.set(RelayState.NO)
-        self.assertEqual(self.ss_board.get_relay_status(self.r0.position),
-                         RelayState.NO)
-
-    def test_set_on(self):
-        patch_path = 'antlion.controllers.relay_lib.sain_smart_board.urlopen'
-        with patch(patch_path) as urlopen:
-            board = SainSmartBoard(self.config)
-            board.status_dict = {}
-            board.set(self.r0.position, RelayState.NC)
-        urlopen.assert_called_once_with('%s%s' %
-                                        (self.ss_board.base_url, '01'))
-
-    def test_set_off(self):
-        patch_path = 'antlion.controllers.relay_lib.sain_smart_board.urlopen'
-        with patch(patch_path) as urlopen:
-            board = SainSmartBoard(self.config)
-            board.status_dict = {}
-            board.set(self.r0.position, RelayState.NO)
-        urlopen.assert_called_once_with('%s%s' %
-                                        (self.ss_board.base_url, '00'))
-
-    def test_connection_error_no_tux(self):
-        default_status_msg = self.STATUS_MSG
-        self.STATUS_MSG = self.STATUS_MSG.replace('TUX', '')
-        try:
-            self._set_status_page('1111111111111111')
-            self.ss_board.get_relay_status(0)
-        except RelayDeviceConnectionError:
-            self.STATUS_MSG = default_status_msg
-            return
-
-        self.fail('Should have thrown an error without TUX appearing.')
-
-
-class ActsRelayRigTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.config = {
-            'boards': [{
-                'type': 'SainSmartBoard',
-                'name': 'ss_control',
-                'base_url': 'http://192.168.1.4/30000/'
-            }, {
-                'type': 'SainSmartBoard',
-                'name': 'ss_control_2',
-                'base_url': 'http://192.168.1.4/30000/'
-            }],
-            'devices': [{
-                'type': 'GenericRelayDevice',
-                'name': 'device',
-                'relays': {
-                    'Relay00': 'ss_control/0',
-                    'Relay10': 'ss_control/1'
-                }
-            }]
-        }
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_init_relay_rig_missing_boards(self):
-        flawed_config = copy.deepcopy(self.config)
-        del flawed_config['boards']
-        with self.assertRaises(RelayConfigError):
-            RelayRig(flawed_config)
-
-    def test_init_relay_rig_is_not_list(self):
-        flawed_config = copy.deepcopy(self.config)
-        flawed_config['boards'] = self.config['boards'][0]
-        with self.assertRaises(RelayConfigError):
-            RelayRig(flawed_config)
-
-    def test_init_relay_rig_duplicate_board_names(self):
-        flawed_config = copy.deepcopy(self.config)
-        flawed_config['boards'][1]['name'] = (self.config['boards'][0]['name'])
-        with self.assertRaises(RelayConfigError):
-            RelayRigMock(flawed_config)
-
-    def test_init_relay_rig_device_gets_relays(self):
-        modded_config = copy.deepcopy(self.config)
-        del modded_config['devices'][0]['relays']['Relay00']
-        rig = RelayRigMock(modded_config)
-        self.assertEqual(len(rig.relays), 4)
-        self.assertEqual(len(rig.devices['device'].relays), 1)
-
-        rig = RelayRigMock(self.config)
-        self.assertEqual(len(rig.devices['device'].relays), 2)
-
-    def test_init_relay_rig_correct_device_type(self):
-        rig = RelayRigMock(self.config)
-        self.assertEqual(len(rig.devices), 1)
-        self.assertIsInstance(rig.devices['device'], GenericRelayDevice)
-
-    def test_init_relay_rig_missing_devices_creates_generic_device(self):
-        modded_config = copy.deepcopy(self.config)
-        del modded_config['devices']
-        rig = RelayRigMock(modded_config)
-        self.assertEqual(len(rig.devices), 1)
-        self.assertIsInstance(rig.devices['device'], GenericRelayDevice)
-        self.assertDictEqual(rig.devices['device'].relays, rig.relays)
-
-
-class RelayRigMock(RelayRig):
-    """A RelayRig that substitutes the MockBoard for any board."""
-
-    _board_constructors = {
-        'SainSmartBoard': lambda x: MockBoard(x),
-        'FuguMockBoard': lambda x: FuguMockBoard(x)
-    }
-
-    def __init__(self, config=None):
-        if not config:
-            config = {
-                "boards": [{
-                    'name': 'MockBoard',
-                    'type': 'SainSmartBoard'
-                }]
-            }
-
-        RelayRig.__init__(self, config)
-
-
-class ActsGenericRelayDeviceTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.board_config = {'name': 'MockBoard', 'type': 'SainSmartBoard'}
-
-        self.board = MockBoard(self.board_config)
-        self.r0 = self.board.relays[0]
-        self.r1 = self.board.relays[1]
-
-        self.device_config = {
-            'name': 'MockDevice',
-            'relays': {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-        config = {
-            'boards': [self.board_config],
-            'devices': [self.device_config]
-        }
-        self.rig = RelayRigMock(config)
-        self.rig.boards['MockBoard'] = self.board
-        self.rig.relays[self.r0.relay_id] = self.r0
-        self.rig.relays[self.r1.relay_id] = self.r1
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_setup_single_relay(self):
-        self.r0.set(RelayState.NC)
-        self.r1.set(RelayState.NC)
-
-        modified_config = copy.deepcopy(self.device_config)
-        del modified_config['relays']['r1']
-
-        grd = GenericRelayDevice(modified_config, self.rig)
-        grd.setup()
-
-        self.assertEqual(self.r0.get_status(), RelayState.NO)
-        self.assertEqual(self.r1.get_status(), RelayState.NC)
-
-    def test_setup_multiple_relays(self):
-        self.board.set(self.r0.position, RelayState.NC)
-        self.board.set(self.r1.position, RelayState.NC)
-
-        grd = GenericRelayDevice(self.device_config, self.rig)
-        grd.setup()
-
-        self.assertEqual(self.r0.get_status(), RelayState.NO)
-        self.assertEqual(self.r1.get_status(), RelayState.NO)
-
-    def test_cleanup_single_relay(self):
-        self.test_setup_single_relay()
-
-    def test_cleanup_multiple_relays(self):
-        self.test_setup_multiple_relays()
-
-    def change_state(self, begin_state, call, end_state, previous_state=None):
-        self.board.set(self.r0.position, begin_state)
-        grd = GenericRelayDevice(self.device_config, self.rig)
-        call(grd)
-        self.assertEqual(self.r0.get_status(), end_state)
-        if previous_state:
-            self.assertEqual(
-                self.board.relay_previous_states[self.r0.position],
-                previous_state)
-
-    def test_press_while_no(self):
-        self.change_state(RelayState.NO, lambda x: x.press('r0'),
-                          RelayState.NO, RelayState.NC)
-
-    def test_press_while_nc(self):
-        self.change_state(RelayState.NC, lambda x: x.press('r0'),
-                          RelayState.NO, RelayState.NC)
-
-    def test_hold_down_while_no(self):
-        self.change_state(RelayState.NO, lambda x: x.hold_down('r0'),
-                          RelayState.NC)
-
-    def test_hold_down_while_nc(self):
-        self.change_state(RelayState.NC, lambda x: x.hold_down('r0'),
-                          RelayState.NC)
-
-    def test_release_while_nc(self):
-        self.change_state(RelayState.NC, lambda x: x.release('r0'),
-                          RelayState.NO)
-
-
-class ActsRelayDeviceTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-
-        self.board_config = {
-            'name': 'MockBoard',
-            'relays': [{
-                'id': 0,
-                'relay_pos': 0
-            }, {
-                'id': 1,
-                'relay_pos': 1
-            }]
-        }
-
-        self.board = MockBoard(self.board_config)
-        self.r0 = Relay(self.board, 0)
-        self.r1 = Relay(self.board, 1)
-        self.board.set(self.r0.position, RelayState.NO)
-        self.board.set(self.r1.position, RelayState.NO)
-
-        self.rig = RelayRigMock()
-        self.rig.boards['MockBoard'] = self.board
-        self.rig.relays[self.r0.relay_id] = self.r0
-        self.rig.relays[self.r1.relay_id] = self.r1
-
-        self.device_config = {
-            "type": "GenericRelayDevice",
-            "name": "device",
-            "relays": {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_init_raise_on_name_missing(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        del flawed_config['name']
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_name_wrong_type(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['name'] = {}
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_missing(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        del flawed_config['relays']
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_wrong_type(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = str
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_is_empty(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = []
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_are_dicts_without_names(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = [{'id': 0}, {'id': 1}]
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_are_dicts_without_ids(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = [{'name': 'r0'}, {'name': 'r1'}]
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_pass_relays_have_ids_and_names(self):
-        RelayDevice(self.device_config, self.rig)
-
-
-class TestRelayRigParser(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.board_config = {
-            'name': 'MockBoard',
-            'relays': [{
-                'id': 'r0',
-                'relay_pos': 0
-            }, {
-                'id': 'r1',
-                'relay_pos': 1
-            }]
-        }
-        self.r0 = self.board_config['relays'][0]
-        self.r1 = self.board_config['relays'][1]
-        self.board = MockBoard(self.board_config)
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_create_relay_board_raise_on_missing_type(self):
-        with self.assertRaises(RelayConfigError):
-            RelayRigMock().create_relay_board(self.board_config)
-
-    def test_create_relay_board_valid_config(self):
-        config = copy.deepcopy(self.board_config)
-        config['type'] = 'SainSmartBoard'
-        RelayRigMock().create_relay_board(config)
-
-    def test_create_relay_board_raise_on_type_not_found(self):
-        flawed_config = copy.deepcopy(self.board_config)
-        flawed_config['type'] = 'NonExistentBoard'
-        with self.assertRaises(RelayConfigError):
-            RelayRigMock().create_relay_board(flawed_config)
-
-    def test_create_relay_device_create_generic_on_missing_type(self):
-        rig = RelayRigMock()
-        rig.relays['r0'] = self.r0
-        rig.relays['r1'] = self.r1
-        config = {
-            'name': 'name',
-            'relays': {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-        device = rig.create_relay_device(config)
-        self.assertIsInstance(device, GenericRelayDevice)
-
-    def test_create_relay_device_config_with_type(self):
-        rig = RelayRigMock()
-        rig.relays['r0'] = self.r0
-        rig.relays['r1'] = self.r1
-        config = {
-            'type': 'GenericRelayDevice',
-            'name': '.',
-            'relays': {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-        device = rig.create_relay_device(config)
-        self.assertIsInstance(device, GenericRelayDevice)
-
-    def test_create_relay_device_raise_on_type_not_found(self):
-        rig = RelayRigMock()
-        rig.relays['r0'] = self.r0
-        rig.relays['r1'] = self.r1
-        config = {
-            'type':
-            'SomeInvalidType',
-            'name':
-            '.',
-            'relays': [{
-                'name': 'r0',
-                'pos': 'MockBoard/0'
-            }, {
-                'name': 'r1',
-                'pos': 'MockBoard/1'
-            }]
-        }
-        with self.assertRaises(RelayConfigError):
-            rig.create_relay_device(config)
-
-
-class TestSynchronizeRelays(unittest.TestCase):
-    def test_synchronize_relays(self):
-        Relay.transition_wait_time = .1
-        with SynchronizeRelays():
-            self.assertEqual(Relay.transition_wait_time, 0)
-        self.assertEqual(Relay.transition_wait_time, .1)
-
-
-class FuguMockBoard(MockBoard):
-    def get_relay_position_list(self):
-        return range(4)
-
-
-class TestFuguRemote(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        self.mock_rig = RelayRigMock(
-            {"boards": [{
-                'name': 'MockBoard',
-                'type': 'FuguMockBoard'
-            }]})
-        self.mock_board = self.mock_rig.boards['MockBoard']
-        self.fugu_config = {
-            'type': 'FuguRemote',
-            'name': 'UniqueDeviceName',
-            'mac_address': '00:00:00:00:00:00',
-            'relays': {
-                'Power': 'MockBoard/0',
-                fugu_remote.Buttons.BACK.value: 'MockBoard/1',
-                fugu_remote.Buttons.HOME.value: 'MockBoard/2',
-                fugu_remote.Buttons.PLAY_PAUSE.value: 'MockBoard/3'
-            }
-        }
-        Relay.button_press_time = 0
-
-    def tearDown(self):
-        Relay.button_press_time = .25
-        Relay.transition_wait_time = .2
-
-    def test_config_missing_button(self):
-        """FuguRemote __init__ should throw an error if a relay is missing."""
-        flawed_config = copy.deepcopy(self.fugu_config)
-        del flawed_config['relays']['Power']
-        del flawed_config['relays'][fugu_remote.Buttons.BACK.value]
-        with self.assertRaises(RelayConfigError):
-            fugu_remote.FuguRemote(flawed_config, self.mock_rig)
-
-    def test_config_missing_mac_address(self):
-        """FuguRemote __init__ should throw an error without a mac address."""
-        flawed_config = copy.deepcopy(self.fugu_config)
-        del flawed_config['mac_address']
-        with self.assertRaises(RelayConfigError):
-            fugu_remote.FuguRemote(flawed_config, self.mock_rig)
-
-    def test_config_no_issues(self):
-        """FuguRemote __init__ should not throw errors for a correct config."""
-        fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-
-    def test_power_nc_after_setup(self):
-        """Power should be NORMALLY_CLOSED after calling setup if it exists."""
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.setup()
-        self.assertEqual(self.mock_board.get_relay_status(0), RelayState.NC)
-
-    def press_button_success(self, relay_position):
-        self.assertEqual(self.mock_board.relay_states[relay_position],
-                         RelayState.NO)
-        self.assertEqual(self.mock_board.relay_previous_states[relay_position],
-                         RelayState.NC)
-
-    def test_press_play_pause(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.press_play_pause()
-        self.press_button_success(3)
-
-    def test_press_back(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.press_back()
-        self.press_button_success(1)
-
-    def test_press_home(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.press_home()
-        self.press_button_success(2)
-
-    def test_enter_pairing_mode(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu_remote.PAIRING_MODE_WAIT_TIME = 0
-        fugu.enter_pairing_mode()
-        self.press_button_success(2)
-        self.press_button_success(1)
-
-
-class TestRelayDict(unittest.TestCase):
-    def test_init(self):
-        mock_device = object()
-        blank_dict = dict()
-        relay_dict = RelayDict(mock_device, blank_dict)
-        self.assertEqual(relay_dict._store, blank_dict)
-        self.assertEqual(relay_dict.relay_device, mock_device)
-
-    def test_get_item_valid_key(self):
-        mock_device = object()
-        blank_dict = {'key': 'value'}
-        relay_dict = RelayDict(mock_device, blank_dict)
-        self.assertEqual(relay_dict['key'], 'value')
-
-    def test_get_item_invalid_key(self):
-        # Create an object with a single attribute 'name'
-        mock_device = type('', (object, ), {'name': 'name'})()
-        blank_dict = {'key': 'value'}
-        relay_dict = RelayDict(mock_device, blank_dict)
-        with self.assertRaises(RelayConfigError):
-            value = relay_dict['not_key']
-
-    def test_iter(self):
-        mock_device = type('', (object, ), {'name': 'name'})()
-        data_dict = {'a': '1', 'b': '2', 'c': '3'}
-        relay_dict = RelayDict(mock_device, data_dict)
-
-        rd_set = set()
-        for key in relay_dict:
-            rd_set.add(key)
-        dd_set = set()
-        for key in data_dict:
-            dd_set.add(key)
-
-        self.assertSetEqual(rd_set, dd_set)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_test_decorators_test.py b/src/antlion/unit_tests/acts_test_decorators_test.py
index d7bc12d..7ffc83d 100755
--- a/src/antlion/unit_tests/acts_test_decorators_test.py
+++ b/src/antlion/unit_tests/acts_test_decorators_test.py
@@ -38,24 +38,24 @@
 
 
 def raise_pass():
-    raise signals.TestPass('')
+    raise signals.TestPass("")
 
 
 def raise_failure():
-    raise signals.TestFailure('')
+    raise signals.TestFailure("")
 
 
 def raise_sl4a():
-    raise rpc_client.Sl4aException('')
+    raise rpc_client.Sl4aException("")
 
 
 def raise_generic():
-    raise Exception('')
+    raise Exception("")
 
 
 class MockTest(base_test.BaseTestClass):
-    TEST_CASE_LIST = 'test_run_mock_test'
-    TEST_LOGIC_ATTR = 'test_logic'
+    TEST_CASE_LIST = "test_run_mock_test"
+    TEST_LOGIC_ATTR = "test_logic"
 
     def test_run_mock_test(self):
         getattr(MockTest, MockTest.TEST_LOGIC_ATTR, None)()
@@ -66,11 +66,10 @@
     def setUpClass(cls):
         cls.tmp_dir = tempfile.mkdtemp()
         cls.MOCK_CONFIG = mobly_config_parser.TestRunConfig()
-        cls.MOCK_CONFIG.testbed_name = 'SampleTestBed'
+        cls.MOCK_CONFIG.testbed_name = "SampleTestBed"
         cls.MOCK_CONFIG.log_path = cls.tmp_dir
 
-        cls.MOCK_TEST_RUN_LIST = [(MockTest.__name__,
-                                   [MockTest.TEST_CASE_LIST])]
+        cls.MOCK_TEST_RUN_LIST = [(MockTest.__name__, [MockTest.TEST_CASE_LIST])]
 
     @classmethod
     def tearDownClass(cls):
@@ -80,17 +79,18 @@
         if hasattr(MockTest, MockTest.TEST_LOGIC_ATTR):
             delattr(MockTest, MockTest.TEST_LOGIC_ATTR)
         setattr(MockTest, MockTest.TEST_LOGIC_ATTR, func)
-        self.test_runner = test_runner.TestRunner(self.MOCK_CONFIG,
-                                                  self.MOCK_TEST_RUN_LIST)
+        self.test_runner = test_runner.TestRunner(
+            self.MOCK_CONFIG, self.MOCK_TEST_RUN_LIST
+        )
         self.test_runner.run(MockTest)
 
     def _validate_results_has_extra(self, result, extra_key, extra_value):
         results = self.test_runner.results
-        self.assertGreaterEqual(len(results.executed), 1,
-                                'Expected at least one executed test.')
+        self.assertGreaterEqual(
+            len(results.executed), 1, "Expected at least one executed test."
+        )
         record = results.executed[0]
-        self.assertIsNotNone(record.extras,
-                             'Expected the test record to have extras.')
+        self.assertIsNotNone(record.extras, "Expected the test record to have extras.")
         self.assertEqual(record.extras[extra_key], extra_value)
 
     def test_mock_test_with_raise_pass(self):
@@ -107,20 +107,20 @@
             self.assertIsInstance(results[1], signals.TestFailure)
             self.assertIsInstance(results[2], signals.TestError)
             self.assertIsInstance(results[3], IndexError)
-            raise signals.TestPass('Expected failures occurred')
+            raise signals.TestPass("Expected failures occurred")
 
         @test_decorators.repeated_test(1, 3, result_selector)
         def test_case(_, attempt_number):
             if attempt_number == 1:
                 raise AssertionError()
             elif attempt_number == 2:
-                raise signals.TestFailure('Failed')
+                raise signals.TestFailure("Failed")
             elif attempt_number == 3:
-                raise signals.TestError('Error')
+                raise signals.TestError("Error")
             else:
                 # Note that any Exception that does not fall into another bucket
                 # is also considered a failure
-                raise IndexError('Bad index')
+                raise IndexError("Bad index")
 
         with self.assertRaises(signals.TestPass):
             test_case(mock.Mock())
@@ -130,11 +130,11 @@
             self.assertEqual(len(results), 3)
             for result in results:
                 self.assertIsInstance(result, signals.TestPass)
-            raise signals.TestPass('Expected passes occurred')
+            raise signals.TestPass("Expected passes occurred")
 
         @test_decorators.repeated_test(3, 0, result_selector)
         def test_case(*_):
-            raise signals.TestPass('Passed')
+            raise signals.TestPass("Passed")
 
         with self.assertRaises(signals.TestPass):
             test_case(mock.Mock())
@@ -142,7 +142,7 @@
     def test_abort_signals_are_uncaught(self):
         @test_decorators.repeated_test(3, 0)
         def test_case(*_):
-            raise signals.TestAbortClass('Abort All')
+            raise signals.TestAbortClass("Abort All")
 
         with self.assertRaises(signals.TestAbortClass):
             test_case(mock.Mock())
@@ -160,7 +160,7 @@
 
         @test_decorators.repeated_test(1, 1)
         def test_case(*_):
-            raise signals.TestFailure('Failed')
+            raise signals.TestFailure("Failed")
 
         with self.assertRaises(signals.TestFailure):
             test_case(mock_test_class)
@@ -170,15 +170,15 @@
 
     def test_result_selector_returned_value_gets_raised(self):
         def result_selector(*_):
-            return signals.TestPass('Expect this to be raised.')
+            return signals.TestPass("Expect this to be raised.")
 
         @test_decorators.repeated_test(3, 0, result_selector=result_selector)
         def test_case(*_):
-            raise signals.TestFailure('Result selector ignores this.')
+            raise signals.TestFailure("Result selector ignores this.")
 
         with self.assertRaises(signals.TestPass):
             test_case(mock.Mock())
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
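The repeated_test cases above rely on a decorator that re-runs a test, collects each attempt's outcome, and hands the list to a result_selector. The sketch below shows that general pattern with a hypothetical retry_collect decorator; the argument names and semantics are illustrative and do not mirror test_decorators.repeated_test exactly.

import functools


def retry_collect(attempts, result_selector):
    # Illustrative only: run the callable `attempts` times, collect what each
    # attempt raised or returned, then let `result_selector` pick the outcome.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            results = []
            for attempt in range(1, attempts + 1):
                try:
                    results.append(func(*args, attempt_number=attempt, **kwargs))
                except Exception as e:  # collect failures instead of aborting
                    results.append(e)
            outcome = result_selector(results)
            if isinstance(outcome, Exception):
                raise outcome
            return outcome

        return wrapper

    return decorator


@retry_collect(attempts=3, result_selector=lambda results: results[-1])
def flaky(attempt_number):
    if attempt_number < 3:
        raise ValueError(f"attempt {attempt_number} failed")
    return "passed"


print(flaky())  # -> passed; the selector keeps the final attempt's result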
diff --git a/src/antlion/unit_tests/acts_test_runner_test.py b/src/antlion/unit_tests/acts_test_runner_test.py
index c9132ad..9b3cc58 100755
--- a/src/antlion/unit_tests/acts_test_runner_test.py
+++ b/src/antlion/unit_tests/acts_test_runner_test.py
@@ -34,19 +34,17 @@
     """This test class has unit tests for the implementation of everything
     under antlion.test_runner.
     """
+
     def setUp(self):
         self.tmp_dir = tempfile.mkdtemp()
         self.base_mock_test_config = TestRunConfig()
-        self.base_mock_test_config.testbed_name = 'SampleTestBed'
+        self.base_mock_test_config.testbed_name = "SampleTestBed"
         self.base_mock_test_config.log_path = self.tmp_dir
         self.base_mock_test_config.controller_configs = {
-            'testpaths': [os.path.dirname(IntegrationTest.__file__)]
+            "testpaths": [os.path.dirname(IntegrationTest.__file__)]
         }
-        self.base_mock_test_config.user_params = {
-            'icecream': 42,
-            'extra_param': 'haha'
-        }
-        self.mock_run_list = [('SampleTest', None)]
+        self.base_mock_test_config.user_params = {"icecream": 42, "extra_param": "haha"}
+        self.mock_run_list = [("SampleTest", None)]
 
     def tearDown(self):
         shutil.rmtree(self.tmp_dir)
@@ -60,40 +58,44 @@
         mock_test_config = self.base_mock_test_config.copy()
         tb_key = keys.Config.key_testbed.value
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        my_config = [{
-            'serial': 'xxxx',
-            'magic': 'Magic1'
-        }, {
-            'serial': 'xxxx',
-            'magic': 'Magic2'
-        }]
+        my_config = [
+            {"serial": "xxxx", "magic": "Magic1"},
+            {"serial": "xxxx", "magic": "Magic2"},
+        ]
         mock_test_config.controller_configs[mock_ctrlr_config_name] = my_config
-        tr = test_runner.TestRunner(mock_test_config,
-                                    [('IntegrationTest', None)])
+        tr = test_runner.TestRunner(mock_test_config, [("IntegrationTest", None)])
         tr.run()
         tr.run()
         tr.stop()
         results = tr.results.summary_dict()
-        self.assertEqual(results['Requested'], 2)
-        self.assertEqual(results['Executed'], 2)
-        self.assertEqual(results['Passed'], 2)
+        self.assertEqual(results["Requested"], 2)
+        self.assertEqual(results["Executed"], 2)
+        self.assertEqual(results["Passed"], 2)
 
-    @mock.patch('antlion.controllers.adb.AdbProxy',
-                return_value=acts_android_device_test.MockAdbProxy(
-                    1, return_value=''))
-    @mock.patch('antlion.controllers.fastboot.FastbootProxy',
-                return_value=acts_android_device_test.MockFastbootProxy(1))
-    @mock.patch('antlion.controllers.android_device.list_adb_devices',
-                return_value=['1'])
-    @mock.patch('antlion.controllers.android_device.get_all_instances',
-                return_value=acts_android_device_test.get_mock_ads(1))
     @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.ensure_screen_on',
-        return_value=True)
+        "antlion.controllers.adb.AdbProxy",
+        return_value=acts_android_device_test.MockAdbProxy(1, return_value=""),
+    )
     @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.exit_setup_wizard',
-        return_value=True)
-    @mock.patch('antlion.controllers.android_device.AndroidDevice.start_services')
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=acts_android_device_test.MockFastbootProxy(1),
+    )
+    @mock.patch(
+        "antlion.controllers.android_device.list_adb_devices", return_value=["1"]
+    )
+    @mock.patch(
+        "antlion.controllers.android_device.get_all_instances",
+        return_value=acts_android_device_test.get_mock_ads(1),
+    )
+    @mock.patch(
+        "antlion.controllers.android_device.AndroidDevice.ensure_screen_on",
+        return_value=True,
+    )
+    @mock.patch(
+        "antlion.controllers.android_device.AndroidDevice.exit_setup_wizard",
+        return_value=True,
+    )
+    @mock.patch("antlion.controllers.android_device.AndroidDevice.start_services")
     def test_run_two_test_classes(self, *_):
         """Verifies that running more than one test class in one test run works
         properly.
@@ -104,30 +106,24 @@
         mock_test_config = self.base_mock_test_config.copy()
         tb_key = keys.Config.key_testbed.value
         mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        my_config = [{
-            'serial': 'xxxx',
-            'magic': 'Magic1'
-        }, {
-            'serial': 'xxxx',
-            'magic': 'Magic2'
-        }]
+        my_config = [
+            {"serial": "xxxx", "magic": "Magic1"},
+            {"serial": "xxxx", "magic": "Magic2"},
+        ]
         mock_test_config.controller_configs[mock_ctrlr_config_name] = my_config
-        mock_test_config.controller_configs['AndroidDevice'] = [{
-            'serial':
-            '1',
-            'skip_sl4a':
-            True
-        }]
-        tr = test_runner.TestRunner(mock_test_config,
-                                    [('IntegrationTest', None),
-                                     ('IntegrationTest', None)])
+        mock_test_config.controller_configs["AndroidDevice"] = [
+            {"serial": "1", "skip_sl4a": True}
+        ]
+        tr = test_runner.TestRunner(
+            mock_test_config, [("IntegrationTest", None), ("IntegrationTest", None)]
+        )
         tr.run()
         tr.stop()
         results = tr.results.summary_dict()
-        self.assertEqual(results['Requested'], 2)
-        self.assertEqual(results['Executed'], 2)
-        self.assertEqual(results['Passed'], 2)
+        self.assertEqual(results["Requested"], 2)
+        self.assertEqual(results["Executed"], 2)
+        self.assertEqual(results["Passed"], 2)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
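One detail worth keeping in mind when reading the stacked mock.patch decorators above: decorators apply bottom-up, so the patch written closest to the test method supplies the first mock argument. A small self-contained example of that ordering (the patched os.path targets are arbitrary):

import os
import unittest
from unittest import mock


class StackedPatchOrder(unittest.TestCase):
    @mock.patch("os.path.exists", return_value=True)   # arrives second
    @mock.patch("os.path.isfile", return_value=False)  # arrives first
    def test_order(self, mock_isfile, mock_exists):
        self.assertFalse(os.path.isfile("/tmp/example"))
        self.assertTrue(os.path.exists("/tmp/example"))
        mock_isfile.assert_called_once_with("/tmp/example")
        mock_exists.assert_called_once_with("/tmp/example")


if __name__ == "__main__":
    unittest.main()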
diff --git a/src/antlion/unit_tests/acts_utils_test.py b/src/antlion/unit_tests/acts_utils_test.py
index 1c8b21f..a3abe2e 100755
--- a/src/antlion/unit_tests/acts_utils_test.py
+++ b/src/antlion/unit_tests/acts_utils_test.py
@@ -23,11 +23,12 @@
 
 from antlion import utils
 from antlion import signals
+from antlion.capabilities.ssh import SSHConfig, SSHResult
 from antlion.controllers.adb_lib.error import AdbError
 from antlion.controllers.android_device import AndroidDevice
 from antlion.controllers.fuchsia_device import FuchsiaDevice
 from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import SSHConfig, SSHProvider, SSHResult
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
 from antlion.controllers.utils_lib.ssh.connection import SshConnection
 from antlion.libs.proc import job
 
@@ -41,89 +42,66 @@
 MOCK_WLAN1_IP_ADDRESSES = ""
 
 FUCHSIA_INTERFACES = {
-    'id':
-    '1',
-    'result': [
+    "id": "1",
+    "result": [
         {
-            'id': 1,
-            'name': 'lo',
-            'ipv4_addresses': [
+            "id": 1,
+            "name": "lo",
+            "ipv4_addresses": [
                 [127, 0, 0, 1],
             ],
-            'ipv6_addresses': [
+            "ipv6_addresses": [
                 [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             ],
-            'online': True,
-            'mac': [0, 0, 0, 0, 0, 0],
+            "online": True,
+            "mac": [0, 0, 0, 0, 0, 0],
         },
         {
-            'id':
-            2,
-            'name':
-            'eno1',
-            'ipv4_addresses': [
+            "id": 2,
+            "name": "eno1",
+            "ipv4_addresses": [
                 [100, 127, 110, 79],
             ],
-            'ipv6_addresses': [
-                [
-                    254, 128, 0, 0, 0, 0, 0, 0, 198, 109, 60, 117, 44, 236, 29,
-                    114
-                ],
-                [
-                    36, 1, 250, 0, 4, 128, 122, 0, 141, 79, 133, 255, 204, 92,
-                    120, 126
-                ],
-                [
-                    36, 1, 250, 0, 4, 128, 122, 0, 4, 89, 185, 147, 252, 191,
-                    20, 25
-                ],
+            "ipv6_addresses": [
+                [254, 128, 0, 0, 0, 0, 0, 0, 198, 109, 60, 117, 44, 236, 29, 114],
+                [36, 1, 250, 0, 4, 128, 122, 0, 141, 79, 133, 255, 204, 92, 120, 126],
+                [36, 1, 250, 0, 4, 128, 122, 0, 4, 89, 185, 147, 252, 191, 20, 25],
             ],
-            'online':
-            True,
-            'mac': [0, 224, 76, 5, 76, 229],
+            "online": True,
+            "mac": [0, 224, 76, 5, 76, 229],
         },
         {
-            'id':
-            3,
-            'name':
-            'wlanxc0',
-            'ipv4_addresses': [],
-            'ipv6_addresses': [
-                [
-                    254, 128, 0, 0, 0, 0, 0, 0, 96, 255, 93, 96, 52, 253, 253,
-                    243
-                ],
-                [
-                    254, 128, 0, 0, 0, 0, 0, 0, 70, 7, 11, 255, 254, 118, 126,
-                    192
-                ],
+            "id": 3,
+            "name": "wlanxc0",
+            "ipv4_addresses": [],
+            "ipv6_addresses": [
+                [254, 128, 0, 0, 0, 0, 0, 0, 96, 255, 93, 96, 52, 253, 253, 243],
+                [254, 128, 0, 0, 0, 0, 0, 0, 70, 7, 11, 255, 254, 118, 126, 192],
             ],
-            'online':
-            False,
-            'mac': [68, 7, 11, 118, 126, 192],
+            "online": False,
+            "mac": [68, 7, 11, 118, 126, 192],
         },
     ],
-    'error':
-    None,
+    "error": None,
 }
 
 CORRECT_FULL_IP_LIST = {
-    'ipv4_private': [],
-    'ipv4_public': ['100.127.110.79'],
-    'ipv6_link_local': ['fe80::c66d:3c75:2cec:1d72'],
-    'ipv6_private_local': [],
-    'ipv6_public': [
-        '2401:fa00:480:7a00:8d4f:85ff:cc5c:787e',
-        '2401:fa00:480:7a00:459:b993:fcbf:1419'
-    ]
+    "ipv4_private": [],
+    "ipv4_public": ["100.127.110.79"],
+    "ipv6_link_local": ["fe80::c66d:3c75:2cec:1d72"],
+    "ipv6_private_local": [],
+    "ipv6_public": [
+        "2401:fa00:480:7a00:8d4f:85ff:cc5c:787e",
+        "2401:fa00:480:7a00:459:b993:fcbf:1419",
+    ],
 }
 
 CORRECT_EMPTY_IP_LIST = {
-    'ipv4_private': [],
-    'ipv4_public': [],
-    'ipv6_link_local': [],
-    'ipv6_private_local': [],
-    'ipv6_public': []
+    "ipv4_private": [],
+    "ipv4_public": [],
+    "ipv6_link_local": [],
+    "ipv6_private_local": [],
+    "ipv6_public": [],
 }
 
 
@@ -131,18 +109,16 @@
     """This test class for unit testing antlion.utils.bypass_setup_wizard."""
 
     def test_start_standing_subproc(self):
-        with self.assertRaisesRegex(utils.ActsUtilsError,
-                                    'Process .* has terminated'):
-            utils.start_standing_subprocess('sleep 0', check_health_delay=0.1)
+        with self.assertRaisesRegex(utils.ActsUtilsError, "Process .* has terminated"):
+            utils.start_standing_subprocess("sleep 0", check_health_delay=0.1)
 
     def test_stop_standing_subproc(self):
-        p = utils.start_standing_subprocess('sleep 0')
+        p = utils.start_standing_subprocess("sleep 0")
         time.sleep(0.1)
-        with self.assertRaisesRegex(utils.ActsUtilsError,
-                                    'Process .* has terminated'):
+        with self.assertRaisesRegex(utils.ActsUtilsError, "Process .* has terminated"):
             utils.stop_standing_subprocess(p)
 
-    @mock.patch('time.sleep')
+    @mock.patch("time.sleep")
     def test_bypass_setup_wizard_no_complications(self, _):
         ad = mock.Mock()
         ad.adb.shell.side_effect = [
@@ -155,10 +131,10 @@
         self.assertTrue(utils.bypass_setup_wizard(ad))
         self.assertFalse(
             ad.adb.root_adb.called,
-            'The root command should not be called if there are no '
-            'complications.')
+            "The root command should not be called if there are no " "complications.",
+        )
 
-    @mock.patch('time.sleep')
+    @mock.patch("time.sleep")
     def test_bypass_setup_wizard_unrecognized_error(self, _):
         ad = mock.Mock()
         ad.adb.shell.side_effect = [
@@ -171,10 +147,11 @@
             utils.bypass_setup_wizard(ad)
         self.assertFalse(
             ad.adb.root_adb.called,
-            'The root command should not be called if we do not have a '
-            'codepath for recovering from the failure.')
+            "The root command should not be called if we do not have a "
+            "codepath for recovering from the failure.",
+        )
 
-    @mock.patch('time.sleep')
+    @mock.patch("time.sleep")
     def test_bypass_setup_wizard_need_root_access(self, _):
         ad = mock.Mock()
         ad.adb.shell.side_effect = [
@@ -183,17 +160,17 @@
             # Return value for rooting the device
             BypassSetupWizardReturn.NO_COMPLICATIONS,
             # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD
+            PROVISIONED_STATE_GOOD,
         ]
 
         utils.bypass_setup_wizard(ad)
 
         self.assertTrue(
             ad.adb.root_adb_called,
-            'The command required root access, but the device was never '
-            'rooted.')
+            "The command required root access, but the device was never " "rooted.",
+        )
 
-    @mock.patch('time.sleep')
+    @mock.patch("time.sleep")
     def test_bypass_setup_wizard_need_root_already_skipped(self, _):
         ad = mock.Mock()
         ad.adb.shell.side_effect = [
@@ -202,12 +179,12 @@
             # Return value for SetupWizardExitActivity after root
             BypassSetupWizardReturn.ALREADY_BYPASSED,
             # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD
+            PROVISIONED_STATE_GOOD,
         ]
         self.assertTrue(utils.bypass_setup_wizard(ad))
         self.assertTrue(ad.adb.root_adb_called)
 
-    @mock.patch('time.sleep')
+    @mock.patch("time.sleep")
     def test_bypass_setup_wizard_root_access_still_fails(self, _):
         ad = mock.Mock()
         ad.adb.shell.side_effect = [
@@ -216,7 +193,7 @@
             # Return value for SetupWizardExitActivity after root
             BypassSetupWizardReturn.UNRECOGNIZED_ERR,
             # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD
+            PROVISIONED_STATE_GOOD,
         ]
 
         with self.assertRaises(AdbError):
@@ -227,38 +204,52 @@
 class BypassSetupWizardReturn:
     # No complications. Bypass works the first time without issues.
     NO_COMPLICATIONS = (
-        'Starting: Intent { cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity }')
+        "Starting: Intent { cmp=com.google.android.setupwizard/"
+        ".SetupWizardExitActivity }"
+    )
 
     # Fail because the wizard doesn't need to be skipped/was skipped already.
-    ALREADY_BYPASSED = AdbError('', 'ADB_CMD_OUTPUT:0', 'Error type 3\n'
-                                'Error: Activity class', 1)
+    ALREADY_BYPASSED = AdbError(
+        "", "ADB_CMD_OUTPUT:0", "Error type 3\n" "Error: Activity class", 1
+    )
     # Fail with different error.
-    UNRECOGNIZED_ERR = AdbError('', 'ADB_CMD_OUTPUT:0', 'Error type 4\n'
-                                'Error: Activity class', 0)
+    UNRECOGNIZED_ERR = AdbError(
+        "", "ADB_CMD_OUTPUT:0", "Error type 4\n" "Error: Activity class", 0
+    )
     # Fail, get root access, then no complications arise.
     ROOT_ADB_NO_COMP = AdbError(
-        '', 'ADB_CMD_OUTPUT:255', 'Security exception: Permission Denial: '
-        'starting Intent { flg=0x10000000 '
-        'cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity } from null '
-        '(pid=5045, uid=2000) not exported from uid '
-        '10000', 0)
+        "",
+        "ADB_CMD_OUTPUT:255",
+        "Security exception: Permission Denial: "
+        "starting Intent { flg=0x10000000 "
+        "cmp=com.google.android.setupwizard/"
+        ".SetupWizardExitActivity } from null "
+        "(pid=5045, uid=2000) not exported from uid "
+        "10000",
+        0,
+    )
     # Even with root access, the bypass setup wizard doesn't need to be skipped.
     ROOT_ADB_SKIPPED = AdbError(
-        '', 'ADB_CMD_OUTPUT:255', 'Security exception: Permission Denial: '
-        'starting Intent { flg=0x10000000 '
-        'cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity } from null '
-        '(pid=5045, uid=2000) not exported from '
-        'uid 10000', 0)
+        "",
+        "ADB_CMD_OUTPUT:255",
+        "Security exception: Permission Denial: "
+        "starting Intent { flg=0x10000000 "
+        "cmp=com.google.android.setupwizard/"
+        ".SetupWizardExitActivity } from null "
+        "(pid=5045, uid=2000) not exported from "
+        "uid 10000",
+        0,
+    )
     # Even with root access, the bypass setup wizard fails
     ROOT_ADB_FAILS = AdbError(
-        '', 'ADB_CMD_OUTPUT:255',
-        'Security exception: Permission Denial: starting Intent { '
-        'flg=0x10000000 cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity } from null (pid=5045, uid=2000) not '
-        'exported from uid 10000', 0)
+        "",
+        "ADB_CMD_OUTPUT:255",
+        "Security exception: Permission Denial: starting Intent { "
+        "flg=0x10000000 cmp=com.google.android.setupwizard/"
+        ".SetupWizardExitActivity } from null (pid=5045, uid=2000) not "
+        "exported from uid 10000",
+        0,
+    )
 
 
 class ConcurrentActionsTest(unittest.TestCase):
@@ -272,22 +263,22 @@
     def function_raises_passed_in_exception_type(exception_type):
         raise exception_type
 
-    def test_run_concurrent_actions_no_raise_returns_proper_return_values(
-            self):
+    def test_run_concurrent_actions_no_raise_returns_proper_return_values(self):
         """Tests run_concurrent_actions_no_raise returns in the correct order.
 
         Each function passed into run_concurrent_actions_no_raise returns the
         values returned from each individual callable in the order passed in.
         """
         ret_values = utils.run_concurrent_actions_no_raise(
-            lambda: self.function_returns_passed_in_arg('ARG1'),
-            lambda: self.function_returns_passed_in_arg('ARG2'),
-            lambda: self.function_returns_passed_in_arg('ARG3'))
+            lambda: self.function_returns_passed_in_arg("ARG1"),
+            lambda: self.function_returns_passed_in_arg("ARG2"),
+            lambda: self.function_returns_passed_in_arg("ARG3"),
+        )
 
         self.assertEqual(len(ret_values), 3)
-        self.assertEqual(ret_values[0], 'ARG1')
-        self.assertEqual(ret_values[1], 'ARG2')
-        self.assertEqual(ret_values[2], 'ARG3')
+        self.assertEqual(ret_values[0], "ARG1")
+        self.assertEqual(ret_values[1], "ARG2")
+        self.assertEqual(ret_values[2], "ARG3")
 
     def test_run_concurrent_actions_no_raise_returns_raised_exceptions(self):
         """Tests run_concurrent_actions_no_raise returns raised exceptions.
@@ -298,7 +289,8 @@
         """
         ret_values = utils.run_concurrent_actions_no_raise(
             lambda: self.function_raises_passed_in_exception_type(IndexError),
-            lambda: self.function_raises_passed_in_exception_type(KeyError))
+            lambda: self.function_raises_passed_in_exception_type(KeyError),
+        )
 
         self.assertEqual(len(ret_values), 2)
         self.assertEqual(ret_values[0].__class__, IndexError)
@@ -312,37 +304,39 @@
         """
 
         ret_values = utils.run_concurrent_actions(
-            lambda: self.function_returns_passed_in_arg('ARG1'),
-            lambda: self.function_returns_passed_in_arg('ARG2'),
-            lambda: self.function_returns_passed_in_arg('ARG3'))
+            lambda: self.function_returns_passed_in_arg("ARG1"),
+            lambda: self.function_returns_passed_in_arg("ARG2"),
+            lambda: self.function_returns_passed_in_arg("ARG3"),
+        )
 
         self.assertEqual(len(ret_values), 3)
-        self.assertEqual(ret_values[0], 'ARG1')
-        self.assertEqual(ret_values[1], 'ARG2')
-        self.assertEqual(ret_values[2], 'ARG3')
+        self.assertEqual(ret_values[0], "ARG1")
+        self.assertEqual(ret_values[1], "ARG2")
+        self.assertEqual(ret_values[2], "ARG3")
 
     def test_run_concurrent_actions_raises_exceptions(self):
         """Tests run_concurrent_actions raises exceptions from given actions."""
         with self.assertRaises(KeyError):
             utils.run_concurrent_actions(
-                lambda: self.function_returns_passed_in_arg('ARG1'), lambda:
-                self.function_raises_passed_in_exception_type(KeyError))
+                lambda: self.function_returns_passed_in_arg("ARG1"),
+                lambda: self.function_raises_passed_in_exception_type(KeyError),
+            )
 
     def test_test_concurrent_actions_raises_non_test_failure(self):
         """Tests test_concurrent_actions raises the given exception."""
         with self.assertRaises(KeyError):
             utils.test_concurrent_actions(
-                lambda: self.function_raises_passed_in_exception_type(KeyError
-                                                                      ),
-                failure_exceptions=signals.TestFailure)
+                lambda: self.function_raises_passed_in_exception_type(KeyError),
+                failure_exceptions=signals.TestFailure,
+            )
 
     def test_test_concurrent_actions_raises_test_failure(self):
         """Tests test_concurrent_actions raises the given exception."""
         with self.assertRaises(signals.TestFailure):
             utils.test_concurrent_actions(
-                lambda: self.function_raises_passed_in_exception_type(KeyError
-                                                                      ),
-                failure_exceptions=KeyError)
+                lambda: self.function_raises_passed_in_exception_type(KeyError),
+                failure_exceptions=KeyError,
+            )
 
 
 class SuppressLogOutputTest(unittest.TestCase):
@@ -356,21 +350,23 @@
             logging.NullHandler(level=lvl)
             for lvl in (logging.DEBUG, logging.INFO, logging.ERROR)
         ]
-        log = logging.getLogger('test_log')
+        log = logging.getLogger("test_log")
         for handler in handlers:
             log.addHandler(handler)
         with utils.SuppressLogOutput(log, [logging.INFO, logging.ERROR]):
             self.assertTrue(
-                any(handler.level == logging.DEBUG
-                    for handler in log.handlers))
+                any(handler.level == logging.DEBUG for handler in log.handlers)
+            )
             self.assertFalse(
-                any(handler.level in (logging.INFO, logging.ERROR)
-                    for handler in log.handlers))
+                any(
+                    handler.level in (logging.INFO, logging.ERROR)
+                    for handler in log.handlers
+                )
+            )
         self.assertCountEqual(handlers, log.handlers)
 
 
 class IpAddressUtilTest(unittest.TestCase):
-
     def test_positive_ipv4_normal_address(self):
         ip_address = "192.168.1.123"
         self.assertTrue(utils.is_valid_ipv4_address(ip_address))
@@ -396,164 +392,193 @@
         self.assertFalse(utils.is_valid_ipv4_address(ip_address))
 
     def test_positive_ipv6(self):
-        ip_address = 'fe80::f693:9fff:fef4:1ac'
+        ip_address = "fe80::f693:9fff:fef4:1ac"
         self.assertTrue(utils.is_valid_ipv6_address(ip_address))
 
     def test_positive_ipv6_link_local(self):
-        ip_address = 'fe80::'
+        ip_address = "fe80::"
         self.assertTrue(utils.is_valid_ipv6_address(ip_address))
 
     def test_negative_ipv6_with_ipv4_address(self):
-        ip_address = '192.168.1.123'
+        ip_address = "192.168.1.123"
         self.assertFalse(utils.is_valid_ipv6_address(ip_address))
 
     def test_negative_ipv6_invalid_characters(self):
-        ip_address = 'fe80:jkyr:f693:9fff:fef4:1ac'
+        ip_address = "fe80:jkyr:f693:9fff:fef4:1ac"
         self.assertFalse(utils.is_valid_ipv6_address(ip_address))
 
     def test_negative_ipv6_invalid_string(self):
-        ip_address = 'fdsafdsafdsafdsf'
+        ip_address = "fdsafdsafdsafdsf"
         self.assertFalse(utils.is_valid_ipv6_address(ip_address))
 
-    @mock.patch('antlion.libs.proc.job.run')
+    @mock.patch("antlion.libs.proc.job.run")
     def test_local_get_interface_ip_addresses_full(self, job_mock):
         job_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
+            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
         ]
-        self.assertEqual(utils.get_interface_ip_addresses(job, 'eno1'),
-                         CORRECT_FULL_IP_LIST)
+        self.assertEqual(
+            utils.get_interface_ip_addresses(job, "eno1"), CORRECT_FULL_IP_LIST
+        )
 
-    @mock.patch('antlion.libs.proc.job.run')
+    @mock.patch("antlion.libs.proc.job.run")
     def test_local_get_interface_ip_addresses_empty(self, job_mock):
         job_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
+            job.Result(
+                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
+            ),
         ]
-        self.assertEqual(utils.get_interface_ip_addresses(job, 'wlan1'),
-                         CORRECT_EMPTY_IP_LIST)
+        self.assertEqual(
+            utils.get_interface_ip_addresses(job, "wlan1"), CORRECT_EMPTY_IP_LIST
+        )
 
-    @mock.patch(
-        'antlion.controllers.utils_lib.ssh.connection.SshConnection.run')
+    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
     def test_ssh_get_interface_ip_addresses_full(self, ssh_mock):
         ssh_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
+            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
         ]
         self.assertEqual(
-            utils.get_interface_ip_addresses(SshConnection('mock_settings'),
-                                             'eno1'), CORRECT_FULL_IP_LIST)
+            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "eno1"),
+            CORRECT_FULL_IP_LIST,
+        )
 
-    @mock.patch(
-        'antlion.controllers.utils_lib.ssh.connection.SshConnection.run')
+    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
     def test_ssh_get_interface_ip_addresses_empty(self, ssh_mock):
         ssh_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
+            job.Result(
+                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
+            ),
         ]
         self.assertEqual(
-            utils.get_interface_ip_addresses(SshConnection('mock_settings'),
-                                             'wlan1'), CORRECT_EMPTY_IP_LIST)
+            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "wlan1"),
+            CORRECT_EMPTY_IP_LIST,
+        )
 
-    @mock.patch('antlion.controllers.adb.AdbProxy')
-    @mock.patch.object(AndroidDevice, 'is_bootloader', return_value=True)
-    def test_android_get_interface_ip_addresses_full(self, is_bootloader,
-                                                     adb_mock):
+    @mock.patch("antlion.controllers.adb.AdbProxy")
+    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
+    def test_android_get_interface_ip_addresses_full(self, is_bootloader, adb_mock):
         adb_mock().shell.side_effect = [
             MOCK_ENO1_IP_ADDRESSES,
         ]
         self.assertEqual(
-            utils.get_interface_ip_addresses(AndroidDevice(), 'eno1'),
-            CORRECT_FULL_IP_LIST)
+            utils.get_interface_ip_addresses(AndroidDevice(), "eno1"),
+            CORRECT_FULL_IP_LIST,
+        )
 
-    @mock.patch('antlion.controllers.adb.AdbProxy')
-    @mock.patch.object(AndroidDevice, 'is_bootloader', return_value=True)
-    def test_android_get_interface_ip_addresses_empty(self, is_bootloader,
-                                                      adb_mock):
+    @mock.patch("antlion.controllers.adb.AdbProxy")
+    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
+    def test_android_get_interface_ip_addresses_empty(self, is_bootloader, adb_mock):
         adb_mock().shell.side_effect = [
             MOCK_WLAN1_IP_ADDRESSES,
         ]
         self.assertEqual(
-            utils.get_interface_ip_addresses(AndroidDevice(), 'wlan1'),
-            CORRECT_EMPTY_IP_LIST)
+            utils.get_interface_ip_addresses(AndroidDevice(), "wlan1"),
+            CORRECT_EMPTY_IP_LIST,
+        )
 
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.ffx',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_lib.utils_lib.wait_for_port')
-    @mock.patch('antlion.controllers.fuchsia_lib.ssh.SSHProvider.run')
     @mock.patch(
-        'antlion.controllers.fuchsia_lib.sl4f.SL4F._verify_sl4f_connection')
-    @mock.patch('antlion.controllers.fuchsia_device.'
-                'FuchsiaDevice._generate_ssh_config')
-    @mock.patch('antlion.controllers.'
-                'fuchsia_lib.netstack.netstack_lib.'
-                'FuchsiaNetstackLib.netstackListInterfaces')
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
+    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
+    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
+    @mock.patch(
+        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
+    )
+    @mock.patch(
+        "antlion.controllers."
+        "fuchsia_lib.netstack.netstack_lib."
+        "FuchsiaNetstackLib.netstackListInterfaces"
+    )
     def test_fuchsia_get_interface_ip_addresses_full(
-            self, list_interfaces_mock, generate_ssh_config_mock,
-            verify_sl4f_conn_mock, ssh_run_mock, wait_for_port_mock, ffx_mock,
-            sl4f_mock):
+        self,
+        list_interfaces_mock,
+        generate_ssh_config_mock,
+        ssh_wait_until_reachable_mock,
+        ssh_run_mock,
+        wait_for_port_mock,
+        ffx_mock,
+        sl4f_mock,
+    ):
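+        # Note: stacked @mock.patch decorators are applied bottom-up, so the
+        # mock arguments above are listed in reverse order of the decorators.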
         # Configure the log path which is required by ACTS logger.
-        logging.log_path = '/tmp/unit_test_garbage'
+        logging.log_path = "/tmp/unit_test_garbage"
 
-        ssh = SSHProvider(SSHConfig('192.168.1.1', 22, '/dev/null'))
+        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
         ssh_run_mock.return_value = SSHResult(
-            subprocess.CompletedProcess([], 0, stdout=b'', stderr=b''))
+            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
+        )
 
         # Don't try to wait for the SL4F server to start; it's not being used.
         wait_for_port_mock.return_value = None
 
-        sl4f_mock.return_value = SL4F(ssh, 'http://192.168.1.1:80')
-        verify_sl4f_conn_mock.return_value = None
+        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
+        ssh_wait_until_reachable_mock.return_value = None
 
         list_interfaces_mock.return_value = FUCHSIA_INTERFACES
         self.assertEqual(
             utils.get_interface_ip_addresses(
-                FuchsiaDevice({'ip': '192.168.1.1'}), 'eno1'),
-            CORRECT_FULL_IP_LIST)
+                FuchsiaDevice({"ip": "192.168.1.1"}), "eno1"
+            ),
+            CORRECT_FULL_IP_LIST,
+        )
 
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.ffx',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_lib.utils_lib.wait_for_port')
-    @mock.patch('antlion.controllers.fuchsia_lib.ssh.SSHProvider.run')
     @mock.patch(
-        'antlion.controllers.fuchsia_lib.sl4f.SL4F._verify_sl4f_connection')
-    @mock.patch('antlion.controllers.fuchsia_device.'
-                'FuchsiaDevice._generate_ssh_config')
-    @mock.patch('antlion.controllers.'
-                'fuchsia_lib.netstack.netstack_lib.'
-                'FuchsiaNetstackLib.netstackListInterfaces')
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
+    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
+    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
+    @mock.patch(
+        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
+    )
+    @mock.patch(
+        "antlion.controllers."
+        "fuchsia_lib.netstack.netstack_lib."
+        "FuchsiaNetstackLib.netstackListInterfaces"
+    )
     def test_fuchsia_get_interface_ip_addresses_empty(
-            self, list_interfaces_mock, generate_ssh_config_mock,
-            verify_sl4f_conn_mock, ssh_run_mock, wait_for_port_mock, ffx_mock,
-            sl4f_mock):
+        self,
+        list_interfaces_mock,
+        generate_ssh_config_mock,
+        ssh_wait_until_reachable_mock,
+        ssh_run_mock,
+        wait_for_port_mock,
+        ffx_mock,
+        sl4f_mock,
+    ):
         # Configure the log path which is required by ACTS logger.
-        logging.log_path = '/tmp/unit_test_garbage'
+        logging.log_path = "/tmp/unit_test_garbage"
 
-        ssh = SSHProvider(SSHConfig('192.168.1.1', 22, '/dev/null'))
+        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
         ssh_run_mock.return_value = SSHResult(
-            subprocess.CompletedProcess([], 0, stdout=b'', stderr=b''))
+            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
+        )
 
         # Don't try to wait for the SL4F server to start; it's not being used.
         wait_for_port_mock.return_value = None
-
-        sl4f_mock.return_value = SL4F(ssh, 'http://192.168.1.1:80')
-        verify_sl4f_conn_mock.return_value = None
+        ssh_wait_until_reachable_mock.return_value = None
+        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
 
         list_interfaces_mock.return_value = FUCHSIA_INTERFACES
         self.assertEqual(
             utils.get_interface_ip_addresses(
-                FuchsiaDevice({'ip': '192.168.1.1'}), 'wlan1'),
-            CORRECT_EMPTY_IP_LIST)
+                FuchsiaDevice({"ip": "192.168.1.1"}), "wlan1"
+            ),
+            CORRECT_EMPTY_IP_LIST,
+        )
 
 
 class GetDeviceTest(unittest.TestCase):
-
     class TestDevice:
-
         def __init__(self, id, device_type=None) -> None:
             self.id = id
             if device_type:
@@ -561,24 +586,24 @@
 
     def test_get_device_none(self):
         devices = []
-        self.assertRaises(ValueError, utils.get_device, devices, 'DUT')
+        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
 
     def test_get_device_default_one(self):
         devices = [self.TestDevice(0)]
-        self.assertEqual(utils.get_device(devices, 'DUT').id, 0)
+        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
 
     def test_get_device_default_many(self):
         devices = [self.TestDevice(0), self.TestDevice(1)]
-        self.assertEqual(utils.get_device(devices, 'DUT').id, 0)
+        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
 
     def test_get_device_specified_one(self):
-        devices = [self.TestDevice(0), self.TestDevice(1, 'DUT')]
-        self.assertEqual(utils.get_device(devices, 'DUT').id, 1)
+        devices = [self.TestDevice(0), self.TestDevice(1, "DUT")]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 1)
 
     def test_get_device_specified_many(self):
-        devices = [self.TestDevice(0, 'DUT'), self.TestDevice(1, 'DUT')]
-        self.assertRaises(ValueError, utils.get_device, devices, 'DUT')
+        devices = [self.TestDevice(0, "DUT"), self.TestDevice(1, "DUT")]
+        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/abstract_inst_test.py b/src/antlion/unit_tests/controllers/abstract_inst_test.py
deleted file mode 100755
index 7952f11..0000000
--- a/src/antlion/unit_tests/controllers/abstract_inst_test.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python unittest module for GNSS Abstract Instrument Library."""
-
-import socket
-import unittest
-from unittest.mock import Mock
-from unittest.mock import patch
-import antlion.controllers.abstract_inst as pyinst
-
-
-class SocketInstrumentTest(unittest.TestCase):
-    """A class for unit-testing antlion.controllers.gnssinst_lib.abstract_inst"""
-
-    @patch('socket.create_connection')
-    def test__connect_socket(self, mock_connect):
-        """test socket connection normal completion."""
-        mock_connect.return_value.recv.return_value = b'Dummy Instrument\n'
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-        test_inst._connect_socket()
-
-        mock_connect.assert_called_with(('192.168.1.11', '5050'), timeout=120)
-
-    @patch('socket.create_connection')
-    def test__connect_socket_timeout(self, mock_connect):
-        """test socket connection with timeout."""
-        mock_connect.side_effect = socket.timeout
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._connect_socket()
-
-    @patch('socket.create_connection')
-    def test__connect_socket_error(self, mock_connect):
-        """test socket connection with socket error."""
-        mock_connect.side_effect = socket.error
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._connect_socket()
-
-    def test__send(self):
-        """test send function with normal completion."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-
-        test_inst._send('TestCommand')
-
-        test_inst._socket.sendall.assert_called_with(b'TestCommand\n')
-
-    def test__send_timeout(self):
-        """test send function with timeout."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.sendall.side_effect = socket.timeout
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._send('TestCommand')
-
-    def test__send_error(self):
-        """test send function with error."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.sendall.side_effect = socket.error
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._send('TestCommand')
-
-    def test__recv(self):
-        """test recv function with normal completion."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.return_value = b'TestResponse\n'
-
-        mock_resp = test_inst._recv()
-
-        self.assertEqual(mock_resp, 'TestResponse')
-
-    def test__recv_timeout(self):
-        """test recv function with timeout."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.side_effect = socket.timeout
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._recv()
-
-    def test__recv_error(self):
-        """test recv function with error."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.side_effect = socket.error
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._recv()
-
-    @patch('socket.create_connection')
-    def test__close_socket(self, mock_connect):
-        """test socket close normal completion."""
-        mock_connect.return_value.recv.return_value = b'Dummy Instrument\n'
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-        test_inst._connect_socket()
-        test_inst._close_socket()
-
-        mock_connect.return_value.shutdown.assert_called_with(socket.SHUT_RDWR)
-        mock_connect.return_value.close.assert_called_with()
-
-    def test__query(self):
-        """test query function with normal completion."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.return_value = b'TestResponse\n'
-
-        mock_resp = test_inst._query('TestCommand')
-
-        test_inst._socket.sendall.assert_called_with(b'TestCommand;*OPC?\n')
-        self.assertEqual(mock_resp, 'TestResponse')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/android_lib/logcat_test.py b/src/antlion/unit_tests/controllers/android_lib/logcat_test.py
index dd0c57f..8cefca2 100644
--- a/src/antlion/unit_tests/controllers/android_lib/logcat_test.py
+++ b/src/antlion/unit_tests/controllers/android_lib/logcat_test.py
@@ -20,7 +20,7 @@
 from antlion.controllers.android_lib import logcat
 from antlion.controllers.android_lib.logcat import TimestampTracker
 
-BASE_TIMESTAMP = '2000-01-01 12:34:56.789   123 75348 '
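+# BASE_TIMESTAMP mimics a logcat line prefix: date, time, then process and thread IDs.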
+BASE_TIMESTAMP = "2000-01-01 12:34:56.789   123 75348 "
 
 
 class LogcatTest(unittest.TestCase):
@@ -28,7 +28,7 @@
 
     @staticmethod
     def patch(patched):
-        return mock.patch('antlion.controllers.android_lib.logcat.%s' % patched)
+        return mock.patch("antlion.controllers.android_lib.logcat.%s" % patched)
 
     def setUp(self):
         self._get_log_level = logcat._get_log_level
@@ -40,58 +40,58 @@
 
     def test_read_output_sets_last_timestamp_if_found(self):
         tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + 'D message')
+        tracker.read_output(BASE_TIMESTAMP + "D message")
 
-        self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
+        self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
 
     def test_read_output_keeps_last_timestamp_if_no_new_stamp_is_found(self):
         tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + 'D message')
-        tracker.read_output('--------- beginning of main')
+        tracker.read_output(BASE_TIMESTAMP + "D message")
+        tracker.read_output("--------- beginning of main")
 
-        self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
+        self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
 
     def test_read_output_updates_timestamp_to_first_in_results(self):
         tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + 'D 9999-99-99 12:34:56.789')
+        tracker.read_output(BASE_TIMESTAMP + "D 9999-99-99 12:34:56.789")
 
-        self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
+        self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
 
     # _get_log_level
 
     def test_get_log_level_verbose(self):
         """Tests that Logcat's verbose logs make it to the debug level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'V')
+        level = logcat._get_log_level(BASE_TIMESTAMP + "V")
 
         self.assertEqual(level, logging.DEBUG)
 
     def test_get_log_level_debug(self):
         """Tests that Logcat's debug logs make it to the debug level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'D')
+        level = logcat._get_log_level(BASE_TIMESTAMP + "D")
 
         self.assertEqual(level, logging.DEBUG)
 
     def test_get_log_level_info(self):
         """Tests that Logcat's info logs make it to the info level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'I')
+        level = logcat._get_log_level(BASE_TIMESTAMP + "I")
 
         self.assertEqual(level, logging.INFO)
 
     def test_get_log_level_warning(self):
         """Tests that Logcat's warning logs make it to the warning level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'W')
+        level = logcat._get_log_level(BASE_TIMESTAMP + "W")
 
         self.assertEqual(level, logging.WARNING)
 
     def test_get_log_level_error(self):
         """Tests that Logcat's error logs make it to the error level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'E')
+        level = logcat._get_log_level(BASE_TIMESTAMP + "E")
 
         self.assertEqual(level, logging.ERROR)
 
     def test_get_log_level_markers(self):
         """Tests that Logcat's marker logs make it to the error level."""
-        level = logcat._get_log_level('--------- beginning of main')
+        level = logcat._get_log_level("--------- beginning of main")
 
         self.assertEqual(level, logging.ERROR)
 
@@ -101,7 +101,7 @@
         logcat._get_log_level = lambda message: logging.INFO
         tracker = mock.Mock()
         log = mock.Mock()
-        message = 'MESSAGE'
+        message = "MESSAGE"
 
         logcat._log_line_func(log, tracker)(message)
 
@@ -112,7 +112,7 @@
         logcat._get_log_level = lambda message: logging.INFO
         tracker = mock.Mock()
         log = mock.Mock()
-        message = 'MESSAGE'
+        message = "MESSAGE"
 
         logcat._log_line_func(log, tracker)(message)
 
@@ -124,52 +124,50 @@
     def test_on_retry_returns_func_that_formats_with_last_timestamp(self):
         tracker = TimestampTracker()
         tracker.read_output(BASE_TIMESTAMP)
-        new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
+        new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
 
         self.assertIn('-T "%s"' % tracker.last_timestamp, new_command)
 
     def test_on_retry_func_returns_string_that_contains_the_given_serial(self):
         tracker = TimestampTracker()
         tracker.read_output(BASE_TIMESTAMP)
-        new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
+        new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
 
-        self.assertTrue('-s S3R14L' in new_command)
+        self.assertTrue("-s S3R14L" in new_command)
 
     def test_on_retry_func_returns_string_that_contains_any_extra_params(self):
         tracker = TimestampTracker()
         tracker.read_output(BASE_TIMESTAMP)
-        new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
+        new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
 
-        self.assertTrue('extra_params' in new_command)
+        self.assertTrue("extra_params" in new_command)
 
     # create_logcat_keepalive_process
 
     def test_create_logcat_keepalive_process_creates_a_new_logger(self):
-        with self.patch('log_stream') as log_stream, self.patch('Process'):
-            logcat.create_logcat_keepalive_process('S3R14L', 'dir')
-        self.assertEqual(log_stream.create_logger.call_args[0][0],
-                         'adblog_S3R14L')
-        self.assertEqual(log_stream.create_logger.call_args[1]['subcontext'],
-                         'dir')
+        with self.patch("log_stream") as log_stream, self.patch("Process"):
+            logcat.create_logcat_keepalive_process("S3R14L", "dir")
+        self.assertEqual(log_stream.create_logger.call_args[0][0], "adblog_S3R14L")
+        self.assertEqual(log_stream.create_logger.call_args[1]["subcontext"], "dir")
 
     def test_create_logcat_keepalive_process_creates_a_new_process(self):
-        with self.patch('log_stream'), self.patch('Process') as process:
-            logcat.create_logcat_keepalive_process('S3R14L', 'dir')
+        with self.patch("log_stream"), self.patch("Process") as process:
+            logcat.create_logcat_keepalive_process("S3R14L", "dir")
 
-        self.assertIn('S3R14L', process.call_args[0][0])
+        self.assertIn("S3R14L", process.call_args[0][0])
 
     def test_create_logcat_keepalive_process_sets_output_callback(self):
-        with self.patch('log_stream'), self.patch('Process'):
-            process = logcat.create_logcat_keepalive_process('S3R14L', 'dir')
+        with self.patch("log_stream"), self.patch("Process"):
+            process = logcat.create_logcat_keepalive_process("S3R14L", "dir")
 
         self.assertEqual(process.set_on_output_callback.called, True)
 
     def test_create_logcat_keepalive_process_sets_on_terminate_callback(self):
-        with self.patch('log_stream'), self.patch('Process'):
-            process = logcat.create_logcat_keepalive_process('S3R14L', 'dir')
+        with self.patch("log_stream"), self.patch("Process"):
+            process = logcat.create_logcat_keepalive_process("S3R14L", "dir")
 
         self.assertEqual(process.set_on_terminate_callback.called, True)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/android_lib/services_test.py b/src/antlion/unit_tests/controllers/android_lib/services_test.py
index 709d6e5..9510e21 100644
--- a/src/antlion/unit_tests/controllers/android_lib/services_test.py
+++ b/src/antlion/unit_tests/controllers/android_lib/services_test.py
@@ -37,30 +37,36 @@
         service.register()
         subscriptions = event_bus._event_bus._subscriptions
         self.assertTrue(
-            any(subscription._func == service._start for subscription in
-                subscriptions[AndroidStartServicesEvent]))
+            any(
+                subscription._func == service._start
+                for subscription in subscriptions[AndroidStartServicesEvent]
+            )
+        )
         self.assertTrue(
-            any(subscription._func == service._stop for subscription in
-                subscriptions[AndroidStopServicesEvent]))
+            any(
+                subscription._func == service._stop
+                for subscription in subscriptions[AndroidStopServicesEvent]
+            )
+        )
 
-    @unittest.mock.patch.object(services.AndroidService, '_start')
+    @unittest.mock.patch.object(services.AndroidService, "_start")
     def test_event_deliver_only_to_matching_serial(self, start_fn):
         """Test that the service only responds to events that matches its
         device serial.
         """
         event_bus._event_bus = event_bus._EventBus()
         service = services.AndroidService(mock.Mock())
-        service.ad.serial = 'right_serial'
+        service.ad.serial = "right_serial"
         service.register()
 
         wrong_ad = mock.Mock()
-        wrong_ad.serial = 'wrong_serial'
+        wrong_ad.serial = "wrong_serial"
         wrong_event = AndroidStartServicesEvent(wrong_ad)
         event_bus.post(wrong_event)
         start_fn.assert_not_called()
 
         right_ad = mock.Mock()
-        right_ad.serial = 'right_serial'
+        right_ad.serial = "right_serial"
         right_event = AndroidStartServicesEvent(right_ad)
         event_bus.post(right_event)
         start_fn.assert_called_with(right_event)
@@ -75,12 +81,18 @@
         service.unregister()
         subscriptions = event_bus._event_bus._subscriptions
         self.assertFalse(
-            any(subscription._func == service._start for subscription in
-                subscriptions[AndroidStartServicesEvent]))
+            any(
+                subscription._func == service._start
+                for subscription in subscriptions[AndroidStartServicesEvent]
+            )
+        )
         self.assertFalse(
-            any(subscription._func == service._stop for subscription in
-                subscriptions[AndroidStopServicesEvent]))
+            any(
+                subscription._func == service._stop
+                for subscription in subscriptions[AndroidStopServicesEvent]
+            )
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py b/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
index f63ea5c..4770549 100644
--- a/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
+++ b/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
@@ -30,18 +30,21 @@
     def test_basic_dhcp_config(self):
         dhcp_conf = DhcpConfig()
 
-        expected_config = ('default-lease-time 600;\n' 'max-lease-time 7200;')
+        expected_config = "default-lease-time 600;\n" "max-lease-time 7200;"
 
         self.assertEqual(expected_config, dhcp_conf.render_config_file())
 
     def test_dhcp_config_with_lease_times(self):
         default_lease_time = 350
         max_lease_time = 5000
-        dhcp_conf = DhcpConfig(default_lease_time=default_lease_time,
-                               max_lease_time=max_lease_time)
+        dhcp_conf = DhcpConfig(
+            default_lease_time=default_lease_time, max_lease_time=max_lease_time
+        )
 
-        expected_config = (f'default-lease-time {default_lease_time};\n'
-                           f'max-lease-time {max_lease_time};')
+        expected_config = (
+            f"default-lease-time {default_lease_time};\n"
+            f"max-lease-time {max_lease_time};"
+        )
 
         self.assertEqual(expected_config, dhcp_conf.render_config_file())
 
@@ -50,51 +53,57 @@
         max_lease_time = 3000
         subnets = [
             # addresses from 10.10.1.0 - 10.10.1.255
-            Subnet(ipaddress.ip_network('10.10.1.0/24')),
+            Subnet(ipaddress.ip_network("10.10.1.0/24")),
             # 4 addresses from 10.10.3.0 - 10.10.3.3
-            Subnet(ipaddress.ip_network('10.10.3.0/30')),
+            Subnet(ipaddress.ip_network("10.10.3.0/30")),
             # 6 addresses from 10.10.5.20 - 10.10.5.25
-            Subnet(ipaddress.ip_network('10.10.5.0/24'),
-                   start=ipaddress.ip_address('10.10.5.20'),
-                   end=ipaddress.ip_address('10.10.5.25'),
-                   router=ipaddress.ip_address('10.10.5.255'),
-                   lease_time=60)
+            Subnet(
+                ipaddress.ip_network("10.10.5.0/24"),
+                start=ipaddress.ip_address("10.10.5.20"),
+                end=ipaddress.ip_address("10.10.5.25"),
+                router=ipaddress.ip_address("10.10.5.255"),
+                lease_time=60,
+            ),
         ]
-        dhcp_conf = DhcpConfig(subnets=subnets,
-                               default_lease_time=default_lease_time,
-                               max_lease_time=max_lease_time)
+        dhcp_conf = DhcpConfig(
+            subnets=subnets,
+            default_lease_time=default_lease_time,
+            max_lease_time=max_lease_time,
+        )
 
         # Unless an explicit start/end address is provided, the second
         # address in the range is used for "start", and the second to
         # last address is used for "end".
-        expected_config = (f'default-lease-time {default_lease_time};\n'
-                           f'max-lease-time {max_lease_time};\n'
-                           'subnet 10.10.1.0 netmask 255.255.255.0 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.0;\n'
-                           '\t\toption routers 10.10.1.1;\n'
-                           '\t\trange 10.10.1.2 10.10.1.254;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}\n'
-                           'subnet 10.10.3.0 netmask 255.255.255.252 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.252;\n'
-                           '\t\toption routers 10.10.3.1;\n'
-                           '\t\trange 10.10.3.2 10.10.3.2;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}\n'
-                           'subnet 10.10.5.0 netmask 255.255.255.0 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.0;\n'
-                           '\t\toption routers 10.10.5.255;\n'
-                           '\t\trange 10.10.5.20 10.10.5.25;\n'
-                           '\t\tdefault-lease-time 60;\n'
-                           '\t\tmax-lease-time 60;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}')
+        expected_config = (
+            f"default-lease-time {default_lease_time};\n"
+            f"max-lease-time {max_lease_time};\n"
+            "subnet 10.10.1.0 netmask 255.255.255.0 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.0;\n"
+            "\t\toption routers 10.10.1.1;\n"
+            "\t\trange 10.10.1.2 10.10.1.254;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}\n"
+            "subnet 10.10.3.0 netmask 255.255.255.252 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.252;\n"
+            "\t\toption routers 10.10.3.1;\n"
+            "\t\trange 10.10.3.2 10.10.3.2;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}\n"
+            "subnet 10.10.5.0 netmask 255.255.255.0 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.0;\n"
+            "\t\toption routers 10.10.5.255;\n"
+            "\t\trange 10.10.5.20 10.10.5.25;\n"
+            "\t\tdefault-lease-time 60;\n"
+            "\t\tmax-lease-time 60;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}"
+        )
 
         self.assertEqual(expected_config, dhcp_conf.render_config_file())
 
@@ -102,36 +111,39 @@
         default_lease_time = 150
         max_lease_time = 3000
         subnets = [
-            Subnet(ipaddress.ip_network('10.10.1.0/24'),
-                   additional_parameters={
-                       'allow': 'unknown-clients',
-                       'foo': 'bar'
-                   },
-                   additional_options={'my-option': 'some-value'}),
+            Subnet(
+                ipaddress.ip_network("10.10.1.0/24"),
+                additional_parameters={"allow": "unknown-clients", "foo": "bar"},
+                additional_options={"my-option": "some-value"},
+            ),
         ]
-        dhcp_conf = DhcpConfig(subnets=subnets,
-                               default_lease_time=default_lease_time,
-                               max_lease_time=max_lease_time)
+        dhcp_conf = DhcpConfig(
+            subnets=subnets,
+            default_lease_time=default_lease_time,
+            max_lease_time=max_lease_time,
+        )
 
         # Unless an explicit start/end address is provided, the second
         # address in the range is used for "start", and the second to
         # last address is used for "end".
-        expected_config = (f'default-lease-time {default_lease_time};\n'
-                           f'max-lease-time {max_lease_time};\n'
-                           'subnet 10.10.1.0 netmask 255.255.255.0 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.0;\n'
-                           '\t\toption routers 10.10.1.1;\n'
-                           '\t\trange 10.10.1.2 10.10.1.254;\n'
-                           '\t\tallow unknown-clients;\n'
-                           '\t\tfoo bar;\n'
-                           '\t\toption my-option some-value;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}')
+        expected_config = (
+            f"default-lease-time {default_lease_time};\n"
+            f"max-lease-time {max_lease_time};\n"
+            "subnet 10.10.1.0 netmask 255.255.255.0 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.0;\n"
+            "\t\toption routers 10.10.1.1;\n"
+            "\t\trange 10.10.1.2 10.10.1.254;\n"
+            "\t\tallow unknown-clients;\n"
+            "\t\tfoo bar;\n"
+            "\t\toption my-option some-value;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}"
+        )
 
         self.assertEqual(expected_config, dhcp_conf.render_config_file())
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py b/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
index 61c8cb6..17ee536 100644
--- a/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
+++ b/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
@@ -16,16 +16,23 @@
 
 import unittest
 
-from antlion.controllers.ap_lib.radio_measurement import BssidInformation, BssidInformationCapabilities, NeighborReportElement, PhyType
+from antlion.controllers.ap_lib.radio_measurement import (
+    BssidInformation,
+    BssidInformationCapabilities,
+    NeighborReportElement,
+    PhyType,
+)
 
-EXPECTED_BSSID = '01:23:45:ab:cd:ef'
+EXPECTED_BSSID = "01:23:45:ab:cd:ef"
 EXPECTED_BSSID_INFO_CAP = BssidInformationCapabilities(
-    spectrum_management=True, qos=True, apsd=True, radio_measurement=True)
+    spectrum_management=True, qos=True, apsd=True, radio_measurement=True
+)
 EXPECTED_OP_CLASS = 81
 EXPECTED_CHAN = 11
 EXPECTED_PHY = PhyType.HT
-EXPECTED_BSSID_INFO = BssidInformation(capabilities=EXPECTED_BSSID_INFO_CAP,
-                                       high_throughput=True)
+EXPECTED_BSSID_INFO = BssidInformation(
+    capabilities=EXPECTED_BSSID_INFO_CAP, high_throughput=True
+)
 
 
 class RadioMeasurementTest(unittest.TestCase):
@@ -38,19 +45,19 @@
         self.assertEqual(int(EXPECTED_BSSID_INFO_CAP), 0b111100)
 
     def test_bssid_information(self):
-        self.assertEqual(EXPECTED_BSSID_INFO.capabilities,
-                         EXPECTED_BSSID_INFO_CAP)
+        self.assertEqual(EXPECTED_BSSID_INFO.capabilities, EXPECTED_BSSID_INFO_CAP)
         self.assertEqual(EXPECTED_BSSID_INFO.high_throughput, True)
         # Must also test the numeric representation.
-        self.assertEqual(int(EXPECTED_BSSID_INFO),
-                         0b10001111000100000000000000000000)
+        self.assertEqual(int(EXPECTED_BSSID_INFO), 0b10001111000100000000000000000000)
 
     def test_neighbor_report_element(self):
-        element = NeighborReportElement(bssid=EXPECTED_BSSID,
-                                        bssid_information=EXPECTED_BSSID_INFO,
-                                        operating_class=EXPECTED_OP_CLASS,
-                                        channel_number=EXPECTED_CHAN,
-                                        phy_type=EXPECTED_PHY)
+        element = NeighborReportElement(
+            bssid=EXPECTED_BSSID,
+            bssid_information=EXPECTED_BSSID_INFO,
+            operating_class=EXPECTED_OP_CLASS,
+            channel_number=EXPECTED_CHAN,
+            phy_type=EXPECTED_PHY,
+        )
         self.assertEqual(element.bssid, EXPECTED_BSSID)
         self.assertEqual(element.bssid_information, EXPECTED_BSSID_INFO)
         self.assertEqual(element.operating_class, EXPECTED_OP_CLASS)
@@ -58,5 +65,5 @@
         self.assertEqual(element.phy_type, EXPECTED_PHY)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py b/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
index 001ee8d..19d9f7e 100644
--- a/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
+++ b/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
@@ -24,12 +24,10 @@
 
 from antlion.controllers.ap_lib.radvd_config import RadvdConfig
 
-SEARCH_FILE = ('antlion.controllers.utils_lib.commands.shell.'
-               'ShellCommand.search_file')
-DELETE_FILE = ('antlion.controllers.utils_lib.commands.shell.ShellCommand.'
-               'delete_file')
+SEARCH_FILE = "antlion.controllers.utils_lib.commands.shell." "ShellCommand.search_file"
+DELETE_FILE = "antlion.controllers.utils_lib.commands.shell.ShellCommand." "delete_file"
 
-CORRECT_COMPLEX_RADVD_CONFIG = ("""interface wlan0 {
+CORRECT_COMPLEX_RADVD_CONFIG = """interface wlan0 {
     IgnoreIfMissing on;
     AdvSendAdvert off;
     UnicastOnly on;
@@ -74,16 +72,20 @@
         AdvRDNSSOpen on;
         AdvRDNSSLifetime 1025;
     };
-};""".replace("    ", "\t"))
+};""".replace(
+    "    ", "\t"
+)
 
-CORRECT_SIMPLE_RADVD_CONFIG = ("""interface wlan0 {
+CORRECT_SIMPLE_RADVD_CONFIG = """interface wlan0 {
     AdvSendAdvert on;
     prefix fd00::/64
     {
         AdvOnLink on;
         AdvAutonomous on;
     };
-};""".replace("    ", "\t"))
+};""".replace(
+    "    ", "\t"
+)
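+# The rendered radvd configs are tab-indented, so the space-indented literals
+# above are converted to tabs before comparison.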
 
 
 def delete_file_mock(file_to_delete):
@@ -92,91 +94,89 @@
 
 
 def write_configs_mock(config_file_with_path, output_config):
-    with open(config_file_with_path, 'w+') as config_fileId:
+    with open(config_file_with_path, "w+") as config_fileId:
         config_fileId.write(output_config)
 
 
 class RadvdTest(unittest.TestCase):
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.kill')
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.kill")
     def test_radvd_ikill(self, kill):
         kill.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         self.assertIsNone(radvd_mock.stop())
 
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive')
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive")
     def test_radvd_is_alive_True(self, is_alive_mock):
         is_alive_mock.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         self.assertTrue(radvd_mock.is_alive())
 
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive')
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive")
     def test_radvd_is_alive_False(self, is_alive_mock):
         is_alive_mock.return_value = False
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         self.assertFalse(radvd_mock.is_alive())
 
-    @patch('antlion.controllers.ap_lib.radvd.Radvd._scan_for_errors')
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
-    def test_wait_for_process_process_alive(self, is_alive_mock,
-                                            _scan_for_errors_mock):
+    @patch("antlion.controllers.ap_lib.radvd.Radvd._scan_for_errors")
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
+    def test_wait_for_process_process_alive(self, is_alive_mock, _scan_for_errors_mock):
         is_alive_mock.return_value = True
         _scan_for_errors_mock.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         self.assertIsNone(radvd_mock._wait_for_process(timeout=2))
 
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
     @patch(SEARCH_FILE)
     def test_scan_for_errors_is_dead(self, search_file_mock, is_alive_mock):
         is_alive_mock.return_value = False
         search_file_mock.return_value = False
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         with self.assertRaises(Error) as context:
             radvd_mock._scan_for_errors(True)
-        self.assertTrue('Radvd failed to start' in str(context.exception))
+        self.assertTrue("Radvd failed to start" in str(context.exception))
 
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
     @patch(SEARCH_FILE)
-    def test_scan_for_errors_exited_prematurely(self, search_file_mock,
-                                                is_alive_mock):
+    def test_scan_for_errors_exited_prematurely(self, search_file_mock, is_alive_mock):
         is_alive_mock.return_value = True
         search_file_mock.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         with self.assertRaises(Error) as context:
             radvd_mock._scan_for_errors(True)
-        self.assertTrue('Radvd exited prematurely.' in str(context.exception))
+        self.assertTrue("Radvd exited prematurely." in str(context.exception))
 
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
     @patch(SEARCH_FILE)
     def test_scan_for_errors_success(self, search_file_mock, is_alive_mock):
         is_alive_mock.return_value = True
         search_file_mock.return_value = False
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         self.assertIsNone(radvd_mock._scan_for_errors(True))
 
     @patch(DELETE_FILE)
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file')
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file")
     def test_write_configs_simple(self, write_file, delete_file):
         delete_file.side_effect = delete_file_mock
         write_file.side_effect = write_configs_mock
         basic_radvd_config = RadvdConfig()
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+        radvd_mock = Radvd("mock_runner", "wlan0")
         radvd_mock._write_configs(basic_radvd_config)
         radvd_config = radvd_mock._config_file
-        with open(radvd_config, 'r') as radvd_config_fileId:
+        with open(radvd_config, "r") as radvd_config_fileId:
             config_data = radvd_config_fileId.read()
             self.assertTrue(CORRECT_SIMPLE_RADVD_CONFIG == config_data)
 
     @patch(DELETE_FILE)
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file')
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file")
     def test_write_configs_complex(self, write_file, delete_file):
         delete_file.side_effect = delete_file_mock
         write_file.side_effect = write_configs_mock
         complex_radvd_config = RadvdConfig(
-            clients=['fe80::c66d:3c75:2cec:1d72', 'fe80::c66d:3c75:2cec:1d73'],
+            clients=["fe80::c66d:3c75:2cec:1d72", "fe80::c66d:3c75:2cec:1d73"],
             route=radvd_constants.DEFAULT_PREFIX,
             rdnss=[
-                '2401:fa00:480:7a00:4d56:5373:4549:1e29',
-                '2401:fa00:480:7a00:4d56:5373:4549:1e30',
+                "2401:fa00:480:7a00:4d56:5373:4549:1e29",
+                "2401:fa00:480:7a00:4d56:5373:4549:1e30",
             ],
             ignore_if_missing=radvd_constants.IGNORE_IF_MISSING_ON,
             adv_send_advert=radvd_constants.ADV_SEND_ADVERT_OFF,
@@ -197,27 +197,27 @@
             adv_home_agent_info=radvd_constants.ADV_HOME_AGENT_INFO_ON,
             home_agent_lifetime=100,
             home_agent_preference=100,
-            adv_mob_rtr_support_flag=radvd_constants.
-            ADV_MOB_RTR_SUPPORT_FLAG_OFF,
+            adv_mob_rtr_support_flag=radvd_constants.ADV_MOB_RTR_SUPPORT_FLAG_OFF,
             adv_interval_opt=radvd_constants.ADV_INTERVAL_OPT_ON,
             adv_on_link=radvd_constants.ADV_ON_LINK_OFF,
             adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
             adv_router_addr=radvd_constants.ADV_ROUTER_ADDR_OFF,
             adv_valid_lifetime=86400,
             adv_preferred_lifetime=14400,
-            base_6to4_interface='NA',
+            base_6to4_interface="NA",
             adv_route_lifetime=1024,
             adv_route_preference=radvd_constants.ADV_ROUTE_PREFERENCE_HIGH,
             adv_rdnss_preference=8,
             adv_rdnss_open=radvd_constants.ADV_RDNSS_OPEN_ON,
-            adv_rdnss_lifetime=1025)
-        radvd_mock = Radvd('mock_runner', 'wlan0')
+            adv_rdnss_lifetime=1025,
+        )
+        radvd_mock = Radvd("mock_runner", "wlan0")
         radvd_mock._write_configs(complex_radvd_config)
         radvd_config = radvd_mock._config_file
-        with open(radvd_config, 'r') as radvd_config_fileId:
+        with open(radvd_config, "r") as radvd_config_fileId:
             config_data = radvd_config_fileId.read()
             self.assertTrue(CORRECT_COMPLEX_RADVD_CONFIG == config_data)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py b/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
index 18d7aa7..0994a35 100644
--- a/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
+++ b/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
@@ -16,21 +16,30 @@
 
 import unittest
 
-from antlion.controllers.ap_lib.radio_measurement import BssidInformation, NeighborReportElement, PhyType
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionCandidateList, BssTransitionManagementRequest
+from antlion.controllers.ap_lib.radio_measurement import (
+    BssidInformation,
+    NeighborReportElement,
+    PhyType,
+)
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionCandidateList,
+    BssTransitionManagementRequest,
+)
 
 EXPECTED_NEIGHBOR_1 = NeighborReportElement(
-    bssid='01:23:45:ab:cd:ef',
+    bssid="01:23:45:ab:cd:ef",
     bssid_information=BssidInformation(),
     operating_class=81,
     channel_number=1,
-    phy_type=PhyType.HT)
+    phy_type=PhyType.HT,
+)
 EXPECTED_NEIGHBOR_2 = NeighborReportElement(
-    bssid='cd:ef:ab:45:67:89',
+    bssid="cd:ef:ab:45:67:89",
     bssid_information=BssidInformation(),
     operating_class=121,
     channel_number=149,
-    phy_type=PhyType.VHT)
+    phy_type=PhyType.VHT,
+)
 EXPECTED_NEIGHBORS = [EXPECTED_NEIGHBOR_1, EXPECTED_NEIGHBOR_2]
 EXPECTED_CANDIDATE_LIST = BssTransitionCandidateList(EXPECTED_NEIGHBORS)
 
@@ -40,12 +49,13 @@
         request = BssTransitionManagementRequest(
             disassociation_imminent=True,
             abridged=True,
-            candidate_list=EXPECTED_NEIGHBORS)
+            candidate_list=EXPECTED_NEIGHBORS,
+        )
         self.assertTrue(request.disassociation_imminent)
         self.assertTrue(request.abridged)
         self.assertIn(EXPECTED_NEIGHBOR_1, request.candidate_list)
         self.assertIn(EXPECTED_NEIGHBOR_2, request.candidate_list)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_lib/__init__.py b/src/antlion/unit_tests/controllers/bits_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/bits_lib/bits_client_test.py b/src/antlion/unit_tests/controllers/bits_lib/bits_client_test.py
deleted file mode 100644
index 8acdbe7..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/bits_client_test.py
+++ /dev/null
@@ -1,326 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from datetime import datetime
-import unittest
-
-from antlion.libs.proc import job
-from antlion.controllers.bits_lib import bits_client
-from antlion.controllers.bits_lib import bits_service_config
-import mock
-
-CONTROLLER_CONFIG_WITH_MONSOON = {
-    'Monsoon': {'serial_num': 1234, 'monsoon_voltage': 4.2}
-}
-
-MONSOONED_CONFIG = bits_service_config.BitsServiceConfig(
-    CONTROLLER_CONFIG_WITH_MONSOON, lvpm_monsoon_bin='lvpm.par')
-
-CONTROLLER_CONFIG_WITHOUT_MONSOON = {}
-
-NON_MONSOONED_CONFIG = bits_service_config.BitsServiceConfig(
-    CONTROLLER_CONFIG_WITHOUT_MONSOON)
-
-KIBBLES_CONFIG = bits_service_config.BitsServiceConfig(
-    {
-        'Kibbles': [{
-            'board':     'board',
-            'connector': 'connector',
-            'serial':    'serial',
-        }],
-    },
-    kibble_bin='bin',
-    kibble_board_file='file.board',
-    virtual_metrics_file='file.vm')
-
-
-class BitsClientTest(unittest.TestCase):
-
-    def setUp(self):
-        super().setUp()
-        self.mock_service = mock.Mock()
-        self.mock_service.port = '42'
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_execute_generic_command(self, mock_run):
-        mock_service = mock.Mock()
-        mock_service.port = '1337'
-        client = bits_client.BitsClient('bits.par', mock_service,
-                                        service_config=KIBBLES_CONFIG)
-
-        client.run_cmd('-i', '-am', '-not', '-a', '-teapot', timeout=12345)
-
-        expected_final_command = ['bits.par',
-                                  '--port',
-                                  '1337',
-                                  '-i',
-                                  '-am',
-                                  '-not',
-                                  '-a',
-                                  '-teapot']
-        mock_run.assert_called_with(expected_final_command, timeout=12345)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_start_collection__without_monsoon__does_not_disconnect_monsoon(
-        self,
-        mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=NON_MONSOONED_CONFIG)
-
-        client.start_collection('collection')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        non_expected_call = list(
-            filter(lambda call: 'usb_disconnect' in call.args[0],
-                   args_list))
-        self.assertEqual(len(non_expected_call), 0,
-                         'did not expect call with usb_disconnect')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_start_collection__frecuency_arg_gets_populated(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.start_collection('collection', default_sampling_rate=12345)
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--time' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 1, 'expected 1 calls with --time')
-        self.assertIn('--default_sampling_rate', expected_calls[0][0][0])
-        self.assertIn('12345', expected_calls[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_start_collection__sampling_rate_defaults_to_1000(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.start_collection('collection')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--time' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 1, 'expected 1 calls with --time')
-        self.assertIn('--default_sampling_rate', expected_calls[0][0][0])
-        self.assertIn('1000', expected_calls[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_stop_collection__usb_not_automanaged__does_not_connect_monsoon(
-        self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.stop_collection('collection')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        non_expected_call = list(
-            filter(lambda call: 'usb_connect' in call.args[0], args_list))
-        self.assertEqual(len(non_expected_call), 0,
-                         'did not expect call with usb_connect')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_export_ignores_dataseries_gaps(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.export('collection', '/path/a.7z.bits')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(
-                lambda call: '--ignore_gaps' in call.args[0] and '--export' in
-                             call.args[0], args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --ignore_gaps and --export')
-        self.assertIn('--ignore_gaps', expected_call[0].args[0])
-
-    def test_export_path_must_end_in_bits_file_extension(self):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        self.assertRaisesRegex(
-            bits_client.BitsClientError,
-            r'collections can only be exported to files ending in .7z.bits',
-            client.export, 'collection', '/path/')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_export_as_csv(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-        output_file = '/path/to/csv'
-        collection = 'collection'
-
-        client.export_as_csv([':mW', ':mV'], collection, output_file)
-
-        mock_run.assert_called()
-        cmd = mock_run.call_args_list[0].args[0]
-        self.assertIn(collection, cmd)
-        self.assertIn(output_file, cmd)
-        self.assertIn(':mW,:mV', cmd)
-        self.assertNotIn('--vm_file', cmd)
-        self.assertNotIn('default', cmd)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_export_as_csv_with_virtual_metrics_file(self, mock_run):
-        output_file = '/path/to/csv'
-        collection = 'collection'
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=KIBBLES_CONFIG)
-
-        client.export_as_csv([':mW', ':mV'], collection, output_file)
-
-        mock_run.assert_called()
-        cmd = mock_run.call_args_list[0].args[0]
-        self.assertIn(collection, cmd)
-        self.assertIn(':mW,:mV', cmd)
-        self.assertIn('--vm_file', cmd)
-        self.assertIn('default', cmd)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_add_markers(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.add_markers('collection', [(1, 'ein'),
-                                          (2, 'zwei'),
-                                          (3, 'drei')])
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--log' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 3, 'expected 3 calls with --log')
-        self.assertIn('--log_ts', expected_calls[0][0][0])
-        self.assertIn('1', expected_calls[0][0][0])
-        self.assertIn('ein', expected_calls[0][0][0])
-
-        self.assertIn('--log_ts', expected_calls[1][0][0])
-        self.assertIn('2', expected_calls[1][0][0])
-        self.assertIn('zwei', expected_calls[1][0][0])
-
-        self.assertIn('--log_ts', expected_calls[2][0][0])
-        self.assertIn('3', expected_calls[2][0][0])
-        self.assertIn('drei', expected_calls[2][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_add_markers_with_datetimes(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.add_markers('collection',
-                           [(datetime.utcfromtimestamp(1), 'ein'),
-                            (2e9, 'zwei'),
-                            (datetime.utcfromtimestamp(3), 'drei')])
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--log' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 3, 'expected 3 calls with --log')
-        self.assertIn('--log_ts', expected_calls[0][0][0])
-        self.assertIn(str(int(1e9)), expected_calls[0][0][0])
-        self.assertIn('ein', expected_calls[0][0][0])
-
-        self.assertIn('--log_ts', expected_calls[1][0][0])
-        self.assertIn(str(int(2e9)), expected_calls[1][0][0])
-        self.assertIn('zwei', expected_calls[1][0][0])
-
-        self.assertIn('--log_ts', expected_calls[2][0][0])
-        self.assertIn(str(int(3e9)), expected_calls[2][0][0])
-        self.assertIn('drei', expected_calls[2][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_get_metrics(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.get_metrics('collection', 8888, 9999)
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--aggregates_yaml_path' in call.args[0],
-                   args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --aggregates_yaml_path')
-        self.assertIn('8888', expected_call[0][0][0])
-        self.assertIn('--ignore_gaps', expected_call[0][0][0])
-        self.assertIn('--abs_stop_time', expected_call[0][0][0])
-        self.assertIn('9999', expected_call[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_get_metrics_with_datetime_markers(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.get_metrics('collection',
-                           datetime.utcfromtimestamp(1),
-                           datetime.utcfromtimestamp(2))
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--aggregates_yaml_path' in call.args[0],
-                   args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --aggregates_yaml_path')
-        self.assertIn(str(int(1e9)), expected_call[0][0][0])
-        self.assertIn('--ignore_gaps', expected_call[0][0][0])
-        self.assertIn('--abs_stop_time', expected_call[0][0][0])
-        self.assertIn(str(int(2e9)), expected_call[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_get_metrics_with_virtual_metrics_file(self, mock_run):
-        service_config = mock.Mock()
-        service_config.has_virtual_metrics_file = True
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=service_config)
-
-        client.get_metrics(8888, 9999)
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--aggregates_yaml_path' in call.args[0],
-                   args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --aggregates_yaml_path')
-        self.assertIn('--vm_file', expected_call[0][0][0])
-        self.assertIn('default', expected_call[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run',
-                return_value=job.Result(stdout=bytes('device', 'utf-8')))
-    def test_list_devices(self, mock_run):
-        service_config = mock.Mock()
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=service_config)
-
-        result = client.list_devices()
-
-        mock_run.assert_called()
-        cmd = mock_run.call_args_list[0].args[0]
-        self.assertIn('--list', cmd)
-        self.assertIn('devices', cmd)
-        self.assertEqual(result, 'device')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_lib/bits_service_config_test.py b/src/antlion/unit_tests/controllers/bits_lib/bits_service_config_test.py
deleted file mode 100644
index 497239e..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/bits_service_config_test.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.bits_lib import bits_service_config
-
-
-class BitsServiceConfigTest(unittest.TestCase):
-
-    def test_basic_config(self):
-        config_dic = bits_service_config.BitsServiceConfig({}).config_dic
-        self.assertIn('devices', config_dic)
-        self.assertIn('default_device', config_dic['devices'])
-        self.assertIn('collectors', config_dic['devices']['default_device'])
-
-    def test_bits_service_config_has_an_enabled_default_device(self):
-        config_dic = bits_service_config.BitsServiceConfig({}).config_dic
-        self.assertEqual(1, config_dic['devices']['default_device']['enabled'])
-
-
-class BitsServiceConfigWithMonsoonTest(unittest.TestCase):
-
-    def test_monsoon_with_serial_less_than_20000_is_configured_as_non_hv(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 19999, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='lvpm_bin', hvpm_monsoon_bin='hvpm_bin')
-        self.assertEqual(0, config.config_dic['hv_monsoon'])
-        self.assertEqual('lvpm_bin', config.config_dic['monsoon_binary_path'])
-
-    def test_lvpm_monsoon_requires_lvpm_binary(self):
-        self.assertRaisesRegex(ValueError,
-                               r'lvpm_monsoon binary is needed but was None.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'serial_num': 19999, 'monsoon_voltage': 1},
-                               hvpm_monsoon_bin='hvpm_bin')
-
-    def test_monsoon_with_serial_greater_than_20000_is_configured_as_hv(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 20001, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='lvpm_bin', hvpm_monsoon_bin='hvpm_bin')
-        self.assertEqual(1, config.config_dic['hv_monsoon'])
-        self.assertEqual('hvpm_bin', config.config_dic['monsoon_binary_path'])
-
-    def test_hvpm_monsoon_requires_hvpm_binary(self):
-        self.assertRaisesRegex(ValueError,
-                               r'hvpm_monsoon binary is needed but was None.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'serial_num': 20001, 'monsoon_voltage': 1},
-                               lvpm_monsoon_bin='hvpm_bin')
-
-    def test_monsoon_config_fails_without_voltage(self):
-        self.assertRaisesRegex(ValueError,
-                               r'Monsoon voltage can not be undefined.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'serial_num': 1},
-                               lvpm_monsoon_bin='lvpm_bin')
-
-    def test_monsoon_config_fails_without_serial(self):
-        self.assertRaisesRegex(ValueError,
-                               r'Monsoon serial_num can not be undefined.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'monsoon_voltage': 1},
-                               lvpm_monsoon_bin='lvpm_bin')
-
-    def test_monsoon_config_is_always_enabled(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin')
-        self.assertEqual(1, config.config_dic['enabled'])
-
-    def test_monsoon_config_disables_monsoon_reseting(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin')
-        self.assertEqual(0, config.config_dic['monsoon_reset'])
-
-    def test_monsoon_config_type_is_monsooncollector(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin')
-        self.assertEqual('monsooncollector', config.config_dic['type'])
-
-    def test_bits_service_config_without_monsoon(self):
-        service_config = bits_service_config.BitsServiceConfig({})
-        self.assertFalse(service_config.has_monsoon)
-
-    def test_bits_service_config_with_a_monsoon(self):
-        service_config = bits_service_config.BitsServiceConfig(
-            {'Monsoon': {'serial_num': 1, 'monsoon_voltage': 1}},
-            lvpm_monsoon_bin='bin')
-        config_dic = service_config.config_dic
-
-        self.assertTrue(service_config.has_monsoon)
-        self.assertIn('Monsoon',
-                      config_dic['devices']['default_device'][
-                          'collectors'])
-
-        monsoon_config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin').config_dic
-        self.assertEqual(monsoon_config,
-                         config_dic['devices']['default_device'][
-                             'collectors']['Monsoon'])
-
-
-class BitsServiceConfigWithKibblesTest(unittest.TestCase):
-    def test_bits_service_config_without_kibbles(self):
-        service_config = bits_service_config.BitsServiceConfig({})
-        self.assertFalse(service_config.has_kibbles)
-
-    def test_bits_service_config_with_kibbles_but_no_vm_files(self):
-        service_config = bits_service_config.BitsServiceConfig({'Kibbles': [
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}]},
-            kibble_bin='bin',
-            kibble_board_file='file.board')
-
-        self.assertFalse(service_config.has_virtual_metrics_file)
-
-    def test_bits_service_config_with_kibbles_and_vm_files(self):
-        service_config = bits_service_config.BitsServiceConfig({'Kibbles': [
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}]},
-            kibble_bin='bin',
-            kibble_board_file='file.board',
-            virtual_metrics_file='some_file.vm')
-        config_dic = service_config.config_dic
-
-        self.assertTrue(service_config.has_virtual_metrics_file)
-        self.assertIn('some_file.vm',
-                      config_dic['devices']['default_device']['vm_files'])
-
-    def test_bits_service_config_with_kibbles(self):
-        service_config = bits_service_config.BitsServiceConfig({'Kibbles': [
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}]},
-            kibble_bin='bin',
-            kibble_board_file='file.board')
-        config_dic = service_config.config_dic
-
-        self.assertTrue(service_config.has_kibbles)
-        self.assertIn('BOARD',
-                      config_dic['devices']['default_device']['collectors'])
-
-        boards_config = bits_service_config._BitsKibblesConfig([
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}],
-            kibble_bin='bin', kibble_board_file='file.board').boards_configs
-        self.assertEqual(boards_config['BOARD'],
-                         config_dic['devices']['default_device'][
-                             'collectors']['BOARD'])
-
-    def test_kibbles_get_grouped_by_board(self):
-        boards_config = bits_service_config._BitsKibblesConfig([
-            {'board': 'BOARD1', 'connector': 'A', 'serial': 'SERIAL1'},
-            {'board': 'BOARD2', 'connector': 'B', 'serial': 'SERIAL2'},
-            {'board': 'BOARD2', 'connector': 'C', 'serial': 'SERIAL3'}],
-            kibble_bin='bin',
-            kibble_board_file='file.board').boards_configs
-
-        self.assertIn('BOARD1', boards_config)
-        board1 = boards_config['BOARD1']
-        self.assertEqual(1, len(board1['attached_kibbles']))
-        self.assertIn('SERIAL1', board1['attached_kibbles'])
-
-        self.assertIn('BOARD2', boards_config)
-        board2 = boards_config['BOARD2']
-        self.assertEqual(2, len(board2['attached_kibbles']))
-        self.assertIn('SERIAL2', board2['attached_kibbles'])
-        self.assertIn('SERIAL3', board2['attached_kibbles'])
-
-    def test_kibble_config_type_is_kibblecollector(self):
-        board_config = bits_service_config._BitsKibblesConfig([
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}],
-            kibble_bin='bin',
-            kibble_board_file='file.board').boards_configs['BOARD']
-
-        self.assertEqual('kibblecollector', board_config['type'])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_lib/bits_service_test.py b/src/antlion/unit_tests/controllers/bits_lib/bits_service_test.py
deleted file mode 100644
index 8c0f405..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/bits_service_test.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.bits_lib import bits_service
-from antlion.controllers.bits_lib import bits_service_config
-import mock
-
-SERVICE_CONFIG = bits_service_config.BitsServiceConfig(
-    {'Monsoon': {'serial_num': 538141, 'monsoon_voltage': 4.2}},
-    hvpm_monsoon_bin='hvpm.par')
-
-
-@mock.patch('antlion.controllers.bits_lib.bits_service.atexit')
-@mock.patch('builtins.open')
-class BitsServiceTest(unittest.TestCase):
-    def test_output_log_opens_on_creation(self, mock_open, *_):
-        bits_service.BitsService(SERVICE_CONFIG, 'binary', 'log_path')
-
-        mock_open.assert_called_with('log_path', 'w')
-
-    @mock.patch.object(bits_service.BitsService, '_write_extra_debug_logs')
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_output_log_gets_closed_on_cleanup(self, _, __, mock_open, *___):
-        mock_log = mock.Mock()
-        mock_open.return_value = mock_log
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service._cleanup()
-
-        mock_log.close.assert_called_with()
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_monsoons_usb_gets_connected_on_cleanup(self, mock_run, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-
-        service._cleanup()
-
-        mock_run.assert_called()
-        self.assertIn('--usbpassthrough', mock_run.call_args[0][0])
-        self.assertIn('on', mock_run.call_args[0][0])
-
-    def test_service_can_not_be_started_twice(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service.service_state = bits_service.BitsServiceStates.STARTED
-        with self.assertRaises(bits_service.BitsServiceError):
-            service.start()
-
-    def test_service_can_not_be_stoped_twice(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service.service_state = bits_service.BitsServiceStates.STOPPED
-        with self.assertRaises(bits_service.BitsServiceError):
-            service.stop()
-
-    def test_stopped_service_can_not_be_started(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service.service_state = bits_service.BitsServiceStates.STOPPED
-        with self.assertRaises(bits_service.BitsServiceError):
-            service.start()
-
-    def test_service_output_changes_service_reported_state(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        self.assertEqual(bits_service.BitsServiceStates.NOT_STARTED,
-                         service.service_state)
-
-        service.port = '1234'
-        service._output_callback('Started server!')
-
-        self.assertEqual(bits_service.BitsServiceStates.STARTED,
-                         service.service_state)
-
-    def test_service_output_defines_port(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-
-        service._output_callback('Server listening on ...:6174.')
-
-        self.assertIsNotNone(service.port)
-
-    @mock.patch('antlion.context.get_current_context')
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_top_level_call_is_timeout_if_timeout_is_defined(self, mock_process,
-                                                             *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path',
-                                           timeout=42)
-
-        def side_effect(*_, **__):
-            service.service_state = bits_service.BitsServiceStates.STARTED
-            return mock.Mock()
-
-        mock_process.side_effect = side_effect
-
-        service.start()
-
-        args, kwargs = mock_process.call_args
-        self.assertEqual('timeout', args[0][0])
-        self.assertEqual('--signal=SIGTERM', args[0][1])
-        self.assertEqual('--kill-after=60', args[0][2])
-        self.assertEqual('42', args[0][3])
-        self.assertEqual('binary', args[0][4])
-
-    @mock.patch.object(bits_service.BitsService, '_write_extra_debug_logs')
-    @mock.patch('antlion.context.get_current_context')
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_top_level_call_is_binary_if_timeout_is_not_defined(self,
-                                                                mock_process,
-                                                                *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-
-        def side_effect(*_, **__):
-            service.service_state = bits_service.BitsServiceStates.STARTED
-            return mock.Mock()
-
-        mock_process.side_effect = side_effect
-
-        service.start()
-
-        args, kwargs = mock_process.call_args
-        self.assertEqual('binary', args[0][0])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_test.py b/src/antlion/unit_tests/controllers/bits_test.py
deleted file mode 100644
index 3b026ac..0000000
--- a/src/antlion/unit_tests/controllers/bits_test.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from antlion.controllers import bits
-from antlion.controllers import power_metrics
-
-
-class BitsTest(unittest.TestCase):
-
-    def test_metric_name_transformation_for_legacy_support(self):
-        avg_current = bits._transform_name('default_name.Monsoon.Monsoon:mA')
-        avg_power = bits._transform_name('default_name.Monsoon.Monsoon:mW')
-
-        self.assertEqual('avg_current', avg_current)
-        self.assertEqual('avg_power', avg_power)
-
-    def test_metric_name_transformation(self):
-        avg_current = bits._transform_name('default_name.slider.XYZ:mA')
-        avg_power = bits._transform_name('default_name.slider.ABCD:mW')
-        unknown_unit = bits._transform_name('default_name.aaaaa.QWERTY:unknown')
-
-        self.assertEqual('XYZ_avg_current', avg_current)
-        self.assertEqual('ABCD_avg_power', avg_power)
-        self.assertEqual('QWERTY', unknown_unit)
-
-    def test_raw_data_to_metrics(self):
-        raw_data = {'data': [
-            {'name': 'default_device.Monsoon.Monsoon:mA',
-             'avg': 21,
-             'unit': 'mA'},
-            {'name': 'default_device.Monsoon.Monsoon:mW',
-             'avg': 91,
-             'unit': 'mW'}]}
-
-        metrics = bits._raw_data_to_metrics(raw_data)
-        self.assertEqual(2, len(metrics))
-        self.assertEqual(
-            power_metrics.Metric(21, 'current', 'mA', 'avg_current'),
-            metrics[0])
-        self.assertEqual(
-            power_metrics.Metric(91, 'power', 'mW', 'avg_power'),
-            metrics[1])
-
-    def test_raw_data_to_metrics_messages_are_ignored(self):
-        raw_data = {'data': [
-            {'name': 'default_device.Log.UserInputs',
-             'avg': float('nan'),
-             'unit': 'Msg'},
-            {'name': 'default_device.Log.Warnings',
-             'avg': float('nan'),
-             'unit': 'Msg'}]}
-
-        metrics = bits._raw_data_to_metrics(raw_data)
-        self.assertEqual(0, len(metrics))
-
-    def test_get_single_file_get_first_element_of_a_list(self):
-        registry = {'some_key': ['first_element', 'second_element']}
-
-        result = bits._get_single_file(registry, 'some_key')
-
-        self.assertEqual('first_element', result)
-
-    def test_get_single_file_gets_string_if_registry_contains_string(self):
-        registry = {'some_key': 'this_is_a_string'}
-
-        result = bits._get_single_file(registry, 'some_key')
-
-        self.assertEqual('this_is_a_string', result)
-
-    def test_get_single_file_gets_none_if_value_is_undefined_or_empty_list(self):
-        registry = {'some_key': []}
-
-        result1 = bits._get_single_file(registry, 'some_key')
-        result2 = bits._get_single_file(registry, 'key_that_is_not_in_registry')
-
-        self.assertEqual(None, result1)
-        self.assertEqual(None, result2)
-
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/data/sample_monsoon_data b/src/antlion/unit_tests/controllers/data/sample_monsoon_data
deleted file mode 100644
index 2a70273..0000000
--- a/src/antlion/unit_tests/controllers/data/sample_monsoon_data
+++ /dev/null
@@ -1,10 +0,0 @@
-0s    3.67
-1s    3.69
-2s    0.95
-3s    3.06
-4s    2.17
-5s    1.62
-6s    3.95
-7s    2.47
-8s    1.11
-9s    0.47
diff --git a/src/antlion/unit_tests/controllers/iperf_client_test.py b/src/antlion/unit_tests/controllers/iperf_client_test.py
index bf20000..63b6d90 100644
--- a/src/antlion/unit_tests/controllers/iperf_client_test.py
+++ b/src/antlion/unit_tests/controllers/iperf_client_test.py
@@ -37,80 +37,79 @@
 
     def test_create_can_create_client_over_adb(self):
         self.assertIsInstance(
-            iperf_client.create([{'AndroidDevice': 'foo'}])[0],
+            iperf_client.create([{"AndroidDevice": "foo"}])[0],
             IPerfClientOverAdb,
-            'Unable to create IPerfClientOverAdb from create().'
+            "Unable to create IPerfClientOverAdb from create().",
         )
 
     def test_create_can_create_client_over_ssh(self):
         self.assertIsInstance(
-            iperf_client.create([{'ssh_config': {'user': '', 'host': ''}}])[0],
+            iperf_client.create(
+                [{"ssh_config": {"user": "root", "host": "192.168.42.11"}}]
+            )[0],
             IPerfClientOverSsh,
-            'Unable to create IPerfClientOverSsh from create().'
+            "Unable to create IPerfClientOverSsh from create().",
         )
 
     def test_create_can_create_local_client(self):
         self.assertIsInstance(
             iperf_client.create([{}])[0],
             IPerfClient,
-            'Unable to create IPerfClient from create().'
+            "Unable to create IPerfClient from create().",
         )
 
 
 class IPerfClientBaseTest(unittest.TestCase):
     """Tests antlion.controllers.iperf_client.IPerfClientBase."""
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
         # Will never actually be created/used.
-        logging.log_path = '/tmp/unit_test_garbage'
+        logging.log_path = "/tmp/unit_test_garbage"
 
         full_file_path = IPerfClientBase._get_full_file_path(0)
 
-        self.assertTrue(
-            mock_makedirs.called,
-            'Did not attempt to create a directory.'
-        )
+        self.assertTrue(mock_makedirs.called, "Did not attempt to create a directory.")
         self.assertEqual(
             os.path.dirname(full_file_path),
             mock_makedirs.call_args[ARGS][0],
-            'The parent directory of the full file path was not created.'
+            "The parent directory of the full file path was not created.",
         )
 
 
 class IPerfClientTest(unittest.TestCase):
     """Tests antlion.controllers.iperf_client.IPerfClient."""
 
-    @mock.patch('builtins.open')
-    @mock.patch('subprocess.call')
+    @mock.patch("builtins.open")
+    @mock.patch("subprocess.call")
     def test_start_writes_to_full_file_path(self, mock_call, mock_open):
         client = IPerfClient()
-        file_path = '/path/to/foo'
+        file_path = "/path/to/foo"
         client._get_full_file_path = lambda _: file_path
 
-        client.start('127.0.0.1', 'IPERF_ARGS', 'TAG')
+        client.start("127.0.0.1", "IPERF_ARGS", "TAG")
 
-        mock_open.assert_called_with(file_path, 'w')
+        mock_open.assert_called_with(file_path, "w")
         self.assertEqual(
-            mock_call.call_args[KWARGS]['stdout'],
+            mock_call.call_args[KWARGS]["stdout"],
             mock_open().__enter__.return_value,
-            'IPerfClient did not write the logs to the expected file.'
+            "IPerfClient did not write the logs to the expected file.",
         )
 
 
 class IPerfClientOverSshTest(unittest.TestCase):
     """Test antlion.controllers.iperf_client.IPerfClientOverSshTest."""
 
-    @mock.patch('builtins.open')
+    @mock.patch("builtins.open")
     def test_start_writes_output_to_full_file_path(self, mock_open):
-        client = IPerfClientOverSsh({'host': '', 'user': ''})
+        client = IPerfClientOverSsh({"host": "192.168.42.11", "user": "root"})
         client._ssh_session = mock.Mock()
-        file_path = '/path/to/foo'
+        file_path = "/path/to/foo"
         client._get_full_file_path = lambda _: file_path
 
-        client.start('127.0.0.1', 'IPERF_ARGS', 'TAG')
+        client.start("127.0.0.1", "IPERF_ARGS", "TAG")
 
-        mock_open.assert_called_with(file_path, 'w')
+        mock_open.assert_called_with(file_path, "w")
         mock_open().__enter__().write.assert_called_with(
             client._ssh_session.run().stdout
         )
@@ -119,21 +118,22 @@
 class IPerfClientOverAdbTest(unittest.TestCase):
     """Test antlion.controllers.iperf_client.IPerfClientOverAdb."""
 
-    @mock.patch('builtins.open')
+    @mock.patch("builtins.open")
     def test_start_writes_output_to_full_file_path(self, mock_open):
         client = IPerfClientOverAdb(None)
-        file_path = '/path/to/foo'
-        expected_output = 'output'
+        file_path = "/path/to/foo"
+        expected_output = "output"
         client._get_full_file_path = lambda _: file_path
 
-        with mock.patch('antlion.controllers.iperf_client.'
-                        'IPerfClientOverAdb._android_device') as adb_device:
-            adb_device.adb.shell.return_value = 'output'
-            client.start('127.0.0.1', 'IPERF_ARGS', 'TAG')
+        with mock.patch(
+            "antlion.controllers.iperf_client." "IPerfClientOverAdb._android_device"
+        ) as adb_device:
+            adb_device.adb.shell.return_value = "output"
+            client.start("127.0.0.1", "IPERF_ARGS", "TAG")
 
-        mock_open.assert_called_with(file_path, 'w')
-        mock_open().__enter__().write.assert_called_with('output')
+        mock_open.assert_called_with(file_path, "w")
+        mock_open().__enter__().write.assert_called_with("output")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/iperf_server_test.py b/src/antlion/unit_tests/controllers/iperf_server_test.py
index e9fc810..560a7e7 100644
--- a/src/antlion/unit_tests/controllers/iperf_server_test.py
+++ b/src/antlion/unit_tests/controllers/iperf_server_test.py
@@ -23,6 +23,7 @@
 from antlion.controllers.iperf_server import IPerfServer
 from antlion.controllers.iperf_server import IPerfServerOverAdb
 from antlion.controllers.iperf_server import IPerfServerOverSsh
+from antlion.controllers.utils_lib.ssh import settings
 
 # The position in the call tuple that represents the args array.
 ARGS = 0
@@ -30,83 +31,87 @@
 # The position in the call tuple that represents the kwargs dict.
 KWARGS = 1
 
-MOCK_LOGFILE_PATH = '/path/to/foo'
+MOCK_LOGFILE_PATH = "/path/to/foo"
 
 
 class IPerfServerModuleTest(unittest.TestCase):
     """Tests the antlion.controllers.iperf_server module."""
+
     def test_create_creates_local_iperf_server_with_int(self):
         self.assertIsInstance(
-            iperf_server.create([12345])[0], IPerfServer,
-            'create() failed to create IPerfServer for integer input.')
+            iperf_server.create([12345])[0],
+            IPerfServer,
+            "create() failed to create IPerfServer for integer input.",
+        )
 
     def test_create_creates_local_iperf_server_with_str(self):
         self.assertIsInstance(
-            iperf_server.create(['12345'])[0], IPerfServer,
-            'create() failed to create IPerfServer for integer input.')
+            iperf_server.create(["12345"])[0],
+            IPerfServer,
+            "create() failed to create IPerfServer for integer input.",
+        )
 
     def test_create_cannot_create_local_iperf_server_with_bad_str(self):
         with self.assertRaises(ValueError):
-            iperf_server.create(['12345BAD_STRING'])
+            iperf_server.create(["12345BAD_STRING"])
 
-    @mock.patch('antlion.controllers.iperf_server.utils')
+    @mock.patch("antlion.controllers.iperf_server.utils")
     def test_create_creates_server_over_ssh_with_ssh_config_and_port(self, _):
         self.assertIsInstance(
-            iperf_server.create([{
-                'ssh_config': {
-                    'user': '',
-                    'host': ''
-                },
-                'port': ''
-            }])[0], IPerfServerOverSsh,
-            'create() failed to create IPerfServerOverSsh for a valid config.')
+            iperf_server.create([{"ssh_config": {"user": "", "host": ""}, "port": ""}])[
+                0
+            ],
+            IPerfServerOverSsh,
+            "create() failed to create IPerfServerOverSsh for a valid config.",
+        )
 
     def test_create_creates_server_over_adb_with_proper_config(self):
         self.assertIsInstance(
-            iperf_server.create([{
-                'AndroidDevice': '53R147',
-                'port': 0
-            }])[0], IPerfServerOverAdb,
-            'create() failed to create IPerfServerOverAdb for a valid config.')
+            iperf_server.create([{"AndroidDevice": "53R147", "port": 0}])[0],
+            IPerfServerOverAdb,
+            "create() failed to create IPerfServerOverAdb for a valid config.",
+        )
 
     def test_create_raises_value_error_on_bad_config_dict(self):
         with self.assertRaises(ValueError):
-            iperf_server.create([{
-                'AndroidDevice': '53R147',
-                'ssh_config': {}
-            }])
+            iperf_server.create([{"AndroidDevice": "53R147", "ssh_config": {}}])
 
     def test_get_port_from_ss_output_returns_correct_port_ipv4(self):
-        ss_output = ('tcp LISTEN  0 5 127.0.0.1:<PORT>  *:*'
-                     ' users:(("cmd",pid=<PID>,fd=3))')
+        ss_output = (
+            "tcp LISTEN  0 5 127.0.0.1:<PORT>  *:*" ' users:(("cmd",pid=<PID>,fd=3))'
+        )
         self.assertEqual(
-            iperf_server._get_port_from_ss_output(ss_output, '<PID>'),
-            '<PORT>')
+            iperf_server._get_port_from_ss_output(ss_output, "<PID>"), "<PORT>"
+        )
 
     def test_get_port_from_ss_output_returns_correct_port_ipv6(self):
-        ss_output = ('tcp LISTEN  0 5 ff:ff:ff:ff:ff:ff:<PORT>  *:*'
-                     ' users:(("cmd",pid=<PID>,fd=3))')
+        ss_output = (
+            "tcp LISTEN  0 5 ff:ff:ff:ff:ff:ff:<PORT>  *:*"
+            ' users:(("cmd",pid=<PID>,fd=3))'
+        )
         self.assertEqual(
-            iperf_server._get_port_from_ss_output(ss_output, '<PID>'),
-            '<PORT>')
+            iperf_server._get_port_from_ss_output(ss_output, "<PID>"), "<PORT>"
+        )
 
 
 class IPerfServerBaseTest(unittest.TestCase):
     """Tests antlion.controllers.iperf_server.IPerfServerBase."""
-    @mock.patch('os.makedirs')
+
+    @mock.patch("os.makedirs")
     def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
         # Will never actually be created/used.
-        logging.log_path = '/tmp/unit_test_garbage'
+        logging.log_path = "/tmp/unit_test_garbage"
 
-        server = IPerfServer('port')
+        server = IPerfServer("port")
 
         full_file_path = server._get_full_file_path()
 
-        self.assertTrue(mock_makedirs.called,
-                        'Did not attempt to create a directory.')
+        self.assertTrue(mock_makedirs.called, "Did not attempt to create a directory.")
         self.assertEqual(
-            os.path.dirname(full_file_path), mock_makedirs.call_args[ARGS][0],
-            'The parent directory of the full file path was not created.')
+            os.path.dirname(full_file_path),
+            mock_makedirs.call_args[ARGS][0],
+            "The parent directory of the full file path was not created.",
+        )
 
 
 class IPerfServerTest(unittest.TestCase):
@@ -117,23 +122,23 @@
     def setUp(self):
         iperf_server._get_port_from_ss_output = lambda *_: IPerfServerTest.PID
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
     def test_start_makes_started_true(self, mock_job, __, ___):
         """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServer('port')
+        server = IPerfServer("port")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server.start()
 
         self.assertTrue(server.started)
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
     def test_start_stop_makes_started_false(self, _, __, ___):
         """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServer('port')
+        server = IPerfServer("port")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
 
         server.start()
@@ -141,38 +146,42 @@
 
         self.assertFalse(server.started)
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
     def test_start_sets_current_log_file(self, _, __, ___):
-        server = IPerfServer('port')
+        server = IPerfServer("port")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
 
         server.start()
 
         self.assertEqual(
-            server._current_log_file, MOCK_LOGFILE_PATH,
-            'The _current_log_file was not received from _get_full_file_path.')
+            server._current_log_file,
+            MOCK_LOGFILE_PATH,
+            "The _current_log_file was not received from _get_full_file_path.",
+        )
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
     def test_stop_returns_current_log_file(self, _, __):
-        server = IPerfServer('port')
+        server = IPerfServer("port")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._current_log_file = MOCK_LOGFILE_PATH
         server._iperf_process = mock.Mock()
 
         log_file = server.stop()
 
-        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
-                         'The _current_log_file was not returned by stop().')
+        self.assertEqual(
+            log_file,
+            MOCK_LOGFILE_PATH,
+            "The _current_log_file was not returned by stop().",
+        )
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
-    def test_start_does_not_run_two_concurrent_processes(
-            self, start_proc, _, __):
-        server = IPerfServer('port')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
+    def test_start_does_not_run_two_concurrent_processes(self, start_proc, _, __):
+        server = IPerfServer("port")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_process = mock.Mock()
 
@@ -180,11 +189,12 @@
 
         self.assertFalse(
             start_proc.called,
-            'start() should not begin a second process if another is running.')
+            "start() should not begin a second process if another is running.",
+        )
 
-    @mock.patch('antlion.utils.stop_standing_subprocess')
+    @mock.patch("antlion.utils.stop_standing_subprocess")
     def test_stop_exits_early_if_no_process_has_started(self, stop_proc):
-        server = IPerfServer('port')
+        server = IPerfServer("port")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_process = None
 
@@ -192,15 +202,16 @@
 
         self.assertFalse(
             stop_proc.called,
-            'stop() should not kill a process if no process is running.')
+            "stop() should not kill a process if no process is running.",
+        )
 
 
 class IPerfServerOverSshTest(unittest.TestCase):
     """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
 
-    INIT_ARGS = [{'host': 'TEST_HOST', 'user': 'test'}, 'PORT']
+    INIT_ARGS = [settings.from_config({"host": "TEST_HOST", "user": "test"}), "PORT"]
 
-    @mock.patch('antlion.controllers.iperf_server.connection')
+    @mock.patch("antlion.controllers.iperf_server.connection")
     def test_start_makes_started_true(self, _):
         """Tests calling start() without calling stop() makes started True."""
         server = IPerfServerOverSsh(*self.INIT_ARGS)
@@ -212,8 +223,8 @@
 
         self.assertTrue(server.started)
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.connection')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.connection")
     def test_start_stop_makes_started_false(self, _, __):
         """Tests calling start() without calling stop() makes started True."""
         server = IPerfServerOverSsh(*self.INIT_ARGS)
@@ -226,8 +237,8 @@
 
         self.assertFalse(server.started)
 
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.connection')
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.connection")
     def test_stop_returns_expected_log_file(self, _, __):
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
@@ -237,10 +248,13 @@
 
         log_file = server.stop()
 
-        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
-                         'The expected log file was not returned by stop().')
+        self.assertEqual(
+            log_file,
+            MOCK_LOGFILE_PATH,
+            "The expected log file was not returned by stop().",
+        )
 
-    @mock.patch('antlion.controllers.iperf_server.connection')
+    @mock.patch("antlion.controllers.iperf_server.connection")
     def test_start_does_not_run_two_concurrent_processes(self, _):
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
@@ -252,10 +266,11 @@
 
         self.assertFalse(
             server._ssh_session.run_async.called,
-            'start() should not begin a second process if another is running.')
+            "start() should not begin a second process if another is running.",
+        )
 
-    @mock.patch('antlion.utils.stop_standing_subprocess')
-    @mock.patch('antlion.controllers.iperf_server.connection')
+    @mock.patch("antlion.utils.stop_standing_subprocess")
+    @mock.patch("antlion.controllers.iperf_server.connection")
     def test_stop_exits_early_if_no_process_has_started(self, _, __):
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
@@ -267,58 +282,63 @@
 
         self.assertFalse(
             server._ssh_session.run_async.called,
-            'stop() should not kill a process if no process is running.')
+            "stop() should not kill a process if no process is running.",
+        )
 
 
 class IPerfServerOverAdbTest(unittest.TestCase):
     """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
 
-    ANDROID_DEVICE_PROP = ('antlion.controllers.iperf_server.'
-                           'IPerfServerOverAdb._android_device')
+    ANDROID_DEVICE_PROP = (
+        "antlion.controllers.iperf_server.IPerfServerOverAdb._android_device"
+    )
 
     @mock.patch(ANDROID_DEVICE_PROP)
     def test_start_makes_started_true(self, mock_ad):
         """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServerOverAdb('53R147', 'PORT')
+        server = IPerfServerOverAdb("53R147", "PORT")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        mock_ad.adb.shell.return_value = '<PID>'
+        mock_ad.adb.shell.return_value = "<PID>"
 
         server.start()
 
         self.assertTrue(server.started)
 
-    @mock.patch('antlion.libs.proc.job.run')
-    @mock.patch('builtins.open')
+    @mock.patch("antlion.libs.proc.job.run")
+    @mock.patch("builtins.open")
     @mock.patch(ANDROID_DEVICE_PROP)
     def test_start_stop_makes_started_false(self, mock_ad, _, __):
         """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServerOverAdb('53R147', 'PORT')
+        server = IPerfServerOverAdb("53R147", "PORT")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        mock_ad.adb.shell.side_effect = ['<PID>', '', '', '']
+        mock_ad.adb.shell.side_effect = ["<PID>", "", "", ""]
 
         server.start()
         server.stop()
 
         self.assertFalse(server.started)
 
-    @mock.patch('antlion.libs.proc.job.run')
-    @mock.patch('builtins.open')
+    @mock.patch("antlion.libs.proc.job.run")
+    @mock.patch("builtins.open")
     @mock.patch(ANDROID_DEVICE_PROP)
     def test_stop_returns_expected_log_file(self, mock_ad, _, __):
-        server = IPerfServerOverAdb('53R147', 'PORT')
+        server = IPerfServerOverAdb("53R147", "PORT")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_process = mock.Mock()
-        server._iperf_process_adb_pid = '<PID>'
-        mock_ad.adb.shell.side_effect = ['', '', '']
+        server._iperf_process_adb_pid = "<PID>"
+        mock_ad.adb.shell.side_effect = ["", "", ""]
 
         log_file = server.stop()
 
-        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
-                         'The expected log file was not returned by stop().')
+        self.assertEqual(
+            log_file,
+            MOCK_LOGFILE_PATH,
+            "The expected log file was not returned by stop().",
+        )
 
     @mock.patch(ANDROID_DEVICE_PROP)
     def test_start_does_not_run_two_concurrent_processes(self, android_device):
-        server = IPerfServerOverAdb('53R147', 'PORT')
+        server = IPerfServerOverAdb("53R147", "PORT")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_process = mock.Mock()
 
@@ -326,14 +346,14 @@
 
         self.assertFalse(
             android_device.adb.shell_nb.called,
-            'start() should not begin a second process if another is running.')
+            "start() should not begin a second process if another is running.",
+        )
 
-    @mock.patch('antlion.libs.proc.job.run')
-    @mock.patch('builtins.open')
+    @mock.patch("antlion.libs.proc.job.run")
+    @mock.patch("builtins.open")
     @mock.patch(ANDROID_DEVICE_PROP)
-    def test_stop_exits_early_if_no_process_has_started(
-            self, android_device, _, __):
-        server = IPerfServerOverAdb('53R147', 'PORT')
+    def test_stop_exits_early_if_no_process_has_started(self, android_device, _, __):
+        server = IPerfServerOverAdb("53R147", "PORT")
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_pid = None
 
@@ -341,8 +361,9 @@
 
         self.assertFalse(
             android_device.adb.shell_nb.called,
-            'stop() should not kill a process if no process is running.')
+            "stop() should not kill a process if no process is running.",
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/monsoon_test.py
deleted file mode 100755
index b22831b..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/monsoon_test.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon
-
-ASSEMBLY_LINE_IMPORT = ('antlion.controllers.monsoon_lib.api.hvpm.monsoon'
-                        '.AssemblyLineBuilder')
-DOWNSAMPLER_IMPORT = ('antlion.controllers.monsoon_lib.api.hvpm.monsoon'
-                      '.DownSampler')
-TEE_IMPORT = 'antlion.controllers.monsoon_lib.api.hvpm.monsoon.Tee'
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-
-class BaseMonsoonTest(unittest.TestCase):
-    """Tests antlion.controllers.monsoon_lib.api.monsoon.Monsoon."""
-
-    SERIAL = 534147
-
-    def setUp(self):
-        self.sleep_patch = mock.patch('time.sleep')
-        self.sleep_patch.start()
-
-        self.mp_manager_patch = mock.patch('multiprocessing.Manager')
-        self.mp_manager_patch.start()
-
-        proxy_mock = mock.MagicMock()
-        proxy_mock.Protocol.getValue.return_value = 1048576 * 4
-        self.monsoon_proxy = mock.patch(
-            'Monsoon.HVPM.Monsoon', return_value=proxy_mock)
-        self.monsoon_proxy.start()
-
-    def tearDown(self):
-        self.sleep_patch.stop()
-        self.monsoon_proxy.stop()
-        self.mp_manager_patch.stop()
-
-    def test_status_fills_status_packet_first(self):
-        """Tests fillStatusPacket() is called before returning the status.
-
-        If this is not done, the status packet returned is stale.
-        """
-
-        def verify_call_order():
-            if not self.monsoon_proxy().fillStatusPacket.called:
-                self.fail('fillStatusPacket must be called first.')
-
-        monsoon = Monsoon(self.SERIAL)
-        monsoon._mon.statusPacket.side_effect = verify_call_order
-
-        status_packet = monsoon.status
-
-        self.assertEqual(
-            status_packet, monsoon._mon.statusPacket,
-            'monsoon.status MUST return '
-            'MonsoonProxy.statusPacket.')
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsample_skipped_if_hz_unset(
-            self, _, downsampler):
-        """Tests the DownSampler transformer is skipped if it is not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=5000, **unimportant_kwargs)
-
-        self.assertFalse(
-            downsampler.called,
-            'A Downsampler should not have been created for a the default '
-            'sampling frequency.')
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsamples_immediately_after_sampling(
-            self, assembly_line, downsampler):
-        """Tests """
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=500, **unimportant_kwargs)
-
-        downsampler.assert_called_once_with(int(round(5000 / 500)))
-        # Assert Downsampler() is the first element within the list.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         downsampler())
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_skipped_if_ouput_path_not_set(self, _, tee):
-        """Tests the Tee Transformer is not added when not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path=None, **unimportant_kwargs)
-
-        self.assertFalse(
-            tee.called,
-            'A Tee Transformer should not have been created for measure_power '
-            'without an output_path.')
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_is_added_to_assembly_line(
-            self, assembly_line, tee):
-        """Tests Tee is added to the assembly line with the correct path."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path='foo', **unimportant_kwargs)
-
-        tee.assert_called_once_with('foo', 0)
-        # Assert Tee() is the first element within the assembly into calls.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         tee())
-
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_transformers_are_added(self, assembly_line):
-        """Tests additional transformers are added to the assembly line."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'output_path': None}
-        expected_transformers = [mock.Mock(), mock.Mock()]
-
-        monsoon.measure_power(
-            1, transformers=expected_transformers, **unimportant_kwargs)
-
-        self.assertEqual(expected_transformers[0],
-                         assembly_line().into.call_args_list[-2][ARGS][0])
-        self.assertEqual(expected_transformers[1],
-                         assembly_line().into.call_args_list[-1][ARGS][0])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/monsoon_test.py
deleted file mode 100755
index 90cbadb..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/monsoon_test.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import Monsoon
-
-ASSEMBLY_LINE_IMPORT = ('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon'
-                        '.AssemblyLineBuilder')
-DOWNSAMPLER_IMPORT = ('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon'
-                      '.DownSampler')
-TEE_IMPORT = 'antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.Tee'
-MONSOON_PROXY_IMPORT = ('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon'
-                        '.MonsoonProxy')
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-
-class BaseMonsoonTest(unittest.TestCase):
-    """Tests antlion.controllers.monsoon_lib.api.monsoon.Monsoon."""
-
-    SERIAL = 534147
-
-    def setUp(self):
-        self.sleep_patch = mock.patch('time.sleep')
-        self.sleep_patch.start()
-
-        self.mp_manager_patch = mock.patch('multiprocessing.Manager')
-        self.mp_manager_patch.start()
-
-        proxy_mock = mock.MagicMock()
-        proxy_mock.get_voltage.return_value = 4.2
-        self.monsoon_proxy = mock.patch(
-            MONSOON_PROXY_IMPORT, return_value=proxy_mock)
-        self.monsoon_proxy.start()
-
-    def tearDown(self):
-        self.sleep_patch.stop()
-        self.monsoon_proxy.stop()
-        self.mp_manager_patch.stop()
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsample_skipped_if_hz_unset(
-            self, _, downsampler):
-        """Tests the DownSampler transformer is skipped if it is not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=5000, **unimportant_kwargs)
-
-        self.assertFalse(
-            downsampler.called,
-            'A Downsampler should not have been created for a the default '
-            'sampling frequency.')
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsamples_immediately_after_sampling(
-            self, assembly_line, downsampler):
-        """Tests """
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=500, **unimportant_kwargs)
-
-        downsampler.assert_called_once_with(int(round(5000 / 500)))
-        # Assert Downsampler() is the first element within the list.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         downsampler())
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_skipped_if_ouput_path_not_set(self, _, tee):
-        """Tests the Tee Transformer is not added when not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path=None, **unimportant_kwargs)
-
-        self.assertFalse(
-            tee.called,
-            'A Tee Transformer should not have been created for measure_power '
-            'without an output_path.')
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_is_added_to_assembly_line(
-            self, assembly_line, tee):
-        """Tests Tee is added to the assembly line with the correct path."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path='foo', **unimportant_kwargs)
-
-        tee.assert_called_once_with('foo', 0)
-        # Assert Tee() is the first element within the assembly into calls.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         tee())
-
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_transformers_are_added(self, assembly_line):
-        """Tests additional transformers are added to the assembly line."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'output_path': None}
-        expected_transformers = [mock.Mock(), mock.Mock()]
-
-        monsoon.measure_power(
-            1, transformers=expected_transformers, **unimportant_kwargs)
-
-        self.assertEqual(expected_transformers[0],
-                         assembly_line().into.call_args_list[-2][ARGS][0])
-        self.assertEqual(expected_transformers[1],
-                         assembly_line().into.call_args_list[-1][ARGS][0])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/monsoon_test.py
deleted file mode 100755
index d40f467..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/monsoon_test.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-from antlion.controllers.monsoon_lib.api.common import PASSTHROUGH_STATES
-from antlion.controllers.monsoon_lib.api.common import PassthroughStates
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-STILL_TIME_LEFT = 0
-OUT_OF_TIME = 9001
-
-
-class MonsoonImpl(BaseMonsoon):
-    MIN_VOLTAGE = 1.5
-    MAX_VOLTAGE = 3.0
-
-    set_voltage = mock.Mock()
-    release_monsoon_connection = mock.Mock()
-    establish_monsoon_connection = mock.Mock()
-
-    def _set_usb_passthrough_mode(self, value):
-        self.__usb_passthrough_mode = value
-
-    def __init__(self):
-        super().__init__()
-        self.__usb_passthrough_mode = None
-
-    @property
-    def status(self):
-        class StatusPacket(object):
-            def __init__(self, passthrough_mode):
-                self.usbPassthroughMode = (
-                    passthrough_mode
-                    if passthrough_mode in PASSTHROUGH_STATES.values() else
-                    PASSTHROUGH_STATES.get(passthrough_mode, None))
-
-        return StatusPacket(self.__usb_passthrough_mode)
-
-
-class BaseMonsoonTest(unittest.TestCase):
-    """Tests antlion.controllers.monsoon_lib.api.monsoon.Monsoon."""
-
-    def setUp(self):
-        self.sleep_patch = mock.patch('time.sleep')
-        self.sleep_patch.start()
-        MonsoonImpl.set_voltage = mock.Mock()
-        MonsoonImpl.release_monsoon_connection = mock.Mock()
-        MonsoonImpl.establish_monsoon_connection = mock.Mock()
-
-    def tearDown(self):
-        self.sleep_patch.stop()
-
-    def test_get_closest_valid_voltage_returns_zero_when_low(self):
-        voltage_to_round_to_zero = MonsoonImpl.MIN_VOLTAGE / 2 - 0.1
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(voltage_to_round_to_zero), 0)
-
-    def test_get_closest_valid_voltage_snaps_to_min_when_low_but_close(self):
-        voltage_to_round_to_min = MonsoonImpl.MIN_VOLTAGE / 2 + 0.1
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(voltage_to_round_to_min),
-            MonsoonImpl.MIN_VOLTAGE)
-
-    def test_get_closest_valid_voltage_snaps_to_max_when_high(self):
-        voltage_to_round_to_max = MonsoonImpl.MAX_VOLTAGE * 2
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(voltage_to_round_to_max),
-            MonsoonImpl.MAX_VOLTAGE)
-
-    def test_get_closest_valid_voltage_to_not_round(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(valid_voltage),
-            valid_voltage)
-
-    def test_is_voltage_valid_voltage_is_valid(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-
-        self.assertTrue(MonsoonImpl.is_voltage_valid(valid_voltage))
-
-    def test_is_voltage_valid_voltage_is_not_valid(self):
-        invalid_voltage = MonsoonImpl.MIN_VOLTAGE - 2
-
-        self.assertFalse(MonsoonImpl.is_voltage_valid(invalid_voltage))
-
-    def test_validate_voltage_voltage_is_valid(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-
-        MonsoonImpl.validate_voltage(valid_voltage)
-
-    def test_validate_voltage_voltage_is_not_valid(self):
-        invalid_voltage = MonsoonImpl.MIN_VOLTAGE - 2
-
-        with self.assertRaises(MonsoonError):
-            MonsoonImpl.validate_voltage(invalid_voltage)
-
-    def test_set_voltage_safe_rounds_unsafe_voltage(self):
-        invalid_voltage = MonsoonImpl.MIN_VOLTAGE - .1
-        monsoon = MonsoonImpl()
-
-        monsoon.set_voltage_safe(invalid_voltage)
-
-        monsoon.set_voltage.assert_called_once_with(MonsoonImpl.MIN_VOLTAGE)
-
-    def test_set_voltage_safe_does_not_round_safe_voltages(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-        monsoon = MonsoonImpl()
-
-        monsoon.set_voltage_safe(valid_voltage)
-
-        monsoon.set_voltage.assert_called_once_with(valid_voltage)
-
-    def test_ramp_voltage_sets_vout_to_final_value(self):
-        """Tests the desired end voltage is set."""
-        monsoon = MonsoonImpl()
-        expected_value = monsoon.MIN_VOLTAGE
-
-        monsoon.ramp_voltage(0, expected_value)
-
-        self.assertEqual(
-            MonsoonImpl.set_voltage.call_args_list[-1][ARGS][0],
-            expected_value, 'The last call to setVout() was not the expected '
-            'final value.')
-
-    def test_ramp_voltage_ramps_voltage_over_time(self):
-        """Tests that voltage increases between each call."""
-        monsoon = MonsoonImpl()
-
-        difference = (MonsoonImpl.VOLTAGE_RAMP_RATE *
-                      MonsoonImpl.VOLTAGE_RAMP_TIME_STEP * 5)
-        monsoon.ramp_voltage(MonsoonImpl.MIN_VOLTAGE,
-                             MonsoonImpl.MIN_VOLTAGE + difference)
-
-        previous_voltage = 0
-        for set_voltage_call in MonsoonImpl.set_voltage.call_args_list:
-            self.assertGreaterEqual(
-                set_voltage_call[ARGS][0], previous_voltage,
-                'ramp_voltage does not always increment voltage.')
-            previous_voltage = set_voltage_call[ARGS][0]
-
-    def test_usb_accepts_passthrough_state_sets_with_str(self):
-        monsoon = MonsoonImpl()
-        state_string = 'on'
-
-        monsoon.usb(state_string)
-
-        self.assertEqual(monsoon.status.usbPassthroughMode,
-                         PASSTHROUGH_STATES[state_string])
-
-    def test_usb_accepts_passthrough_state_sets_with_int_value(self):
-        monsoon = MonsoonImpl()
-
-        monsoon.usb(1)
-
-        self.assertEqual(monsoon.status.usbPassthroughMode, 1)
-
-    def test_usb_raises_on_invalid_str_value(self):
-        monsoon = MonsoonImpl()
-
-        with self.assertRaises(ValueError):
-            monsoon.usb('DEADBEEF')
-
-    def test_usb_raises_on_invalid_int_value(self):
-        monsoon = MonsoonImpl()
-
-        with self.assertRaises(ValueError):
-            monsoon.usb(9001)
-
-    @mock.patch('time.time')
-    def test_usb_raises_timeout_error(self, time):
-        monsoon = MonsoonImpl()
-        time.side_effect = [STILL_TIME_LEFT, OUT_OF_TIME]
-
-        with self.assertRaises(TimeoutError):
-            monsoon.usb(1)
-
-    def test_usb_does_not_set_passthrough_mode_if_unchanged(self):
-        """Tests that the passthrough mode is not reset if it is unchanged."""
-        monsoon = MonsoonImpl()
-        existing_state = PassthroughStates.ON
-        monsoon._set_usb_passthrough_mode(existing_state)
-        monsoon._set_usb_passthrough_mode = mock.Mock()
-
-        monsoon.usb(existing_state)
-
-        self.assertFalse(
-            monsoon._set_usb_passthrough_mode.called,
-            'usbPassthroughMode should not be called when the '
-            'state does not change.')
-
-    def take_samples_always_reestablishes_the_monsoon_connection(self):
-        monsoon = MonsoonImpl()
-        assembly_line = mock.Mock()
-        assembly_line.run.side_effect = Exception('Some Terrible error')
-
-        monsoon.take_samples(assembly_line)
-
-        self.assertTrue(monsoon.establish_monsoon_connection.called)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/assembly_line_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/assembly_line_test.py
deleted file mode 100755
index 7afd5ae..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/assembly_line_test.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import DevNullBufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-
-ASSEMBLY_LINE_MODULE = (
-    'antlion.controllers.monsoon_lib.sampling.engine.assembly_line')
-
-
-def mock_import(full_module_name, import_name):
-    return mock.patch('%s.%s' % (full_module_name, import_name))
-
-
-class ProcessAssemblyLineTest(unittest.TestCase):
-    """Tests the basic functionality of ProcessAssemblyLine."""
-
-    @mock.patch('multiprocessing.Pool')
-    def test_run_no_nodes(self, pool_mock):
-        """Tests run() with no nodes does not spawn a new process."""
-        empty_node_list = []
-        assembly_line = ProcessAssemblyLine(empty_node_list)
-
-        assembly_line.run()
-
-        self.assertFalse(pool_mock().__enter__().apply_async.called)
-
-    @mock.patch('multiprocessing.Pool')
-    def test_run_spawns_new_process_for_each_node(self, pool_mock):
-        """Tests run() with a node spawns a new process for each node."""
-        node_list = [mock.Mock(), mock.Mock()]
-        assembly_line = ProcessAssemblyLine(node_list)
-
-        assembly_line.run()
-
-        apply_async = pool_mock().apply_async
-        self.assertEqual(len(node_list), apply_async.call_count)
-        for node in node_list:
-            apply_async.assert_any_call(node.transformer.transform,
-                                        [node.input_stream])
-
-
-class ThreadAssemblyLineTest(unittest.TestCase):
-    """Tests the basic functionality of ThreadAssemblyLine."""
-
-    @mock_import(ASSEMBLY_LINE_MODULE, 'ThreadPoolExecutor')
-    def test_run_no_nodes(self, pool_mock):
-        """Tests run() with no nodes does not spawn a new thread."""
-        empty_node_list = []
-        assembly_line = ThreadAssemblyLine(empty_node_list)
-
-        assembly_line.run()
-
-        self.assertFalse(pool_mock().__enter__().submit.called)
-
-    @mock_import(ASSEMBLY_LINE_MODULE, 'ThreadPoolExecutor')
-    def test_run_spawns_new_thread_for_each_node(self, pool_mock):
-        """Tests run() with a node spawns a new thread for each node."""
-        node_list = [mock.Mock(), mock.Mock()]
-        assembly_line = ThreadAssemblyLine(node_list)
-
-        assembly_line.run()
-
-        submit = pool_mock().__enter__().submit
-        self.assertEqual(len(node_list), submit.call_count)
-        for node in node_list:
-            submit.assert_any_call(node.transformer.transform,
-                                   node.input_stream)
-
-
-class AssemblyLineBuilderTest(unittest.TestCase):
-    """Tests the basic functionality of AssemblyLineBuilder."""
-
-    def test_source_raises_if_nodes_already_in_assembly_line(self):
-        """Tests a ValueError is raised if a node already exists."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        first_source = mock.Mock()
-        second_source = mock.Mock()
-        builder.source(first_source)
-
-        with self.assertRaises(ValueError) as context:
-            builder.source(second_source)
-
-        self.assertIn('single source', context.exception.args[0])
-
-    def test_source_sets_input_stream_from_given_stream(self):
-        """Tests source() sets input_stream from args."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        input_stream = mock.Mock()
-        dummy_source = mock.Mock()
-
-        builder.source(dummy_source, input_stream=input_stream)
-
-        self.assertEqual(input_stream, builder.nodes[-1].input_stream)
-
-    def test_source_creates_a_new_input_stream(self):
-        """Tests source() takes in DevNullBufferStream when None is provided."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_source = mock.Mock()
-
-        builder.source(dummy_source)
-
-        self.assertIsInstance(builder.nodes[-1].input_stream,
-                              DevNullBufferStream)
-
-    def test_source_returns_self(self):
-        """Tests source() returns the builder."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-
-        return_value = builder.source(mock.Mock())
-
-        self.assertEqual(return_value, builder)
-
-    def test_into_raises_value_error_if_source_not_called_yet(self):
-        """Tests a ValueError is raised if into() is called before source()."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_transformer = mock.Mock()
-
-        with self.assertRaises(ValueError) as context:
-            builder.into(dummy_transformer)
-
-        self.assertIn('source', context.exception.args[0])
-
-    def test_into_raises_value_error_if_already_built(self):
-        """Tests a ValueError is raised into() is called after build()."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_transformer = mock.Mock()
-        # Build before trying to add more nodes.
-        builder.source(dummy_transformer).build()
-
-        with self.assertRaises(ValueError) as context:
-            builder.into(dummy_transformer)
-
-        self.assertIn('built', context.exception.args[0])
-
-    def test_into_appends_transformer_to_node_list(self):
-        """Tests into() appends the transformer to the end of the node list."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_transformer = mock.Mock()
-        dummy_source = mock.Mock()
-        builder.source(dummy_source)
-
-        builder.into(dummy_transformer)
-
-        self.assertEqual(dummy_transformer, builder.nodes[-1].transformer)
-
-    def test_into_sets_output_stream_to_newly_created_stream(self):
-        """Tests into() sets the input_stream queue to the newly created one."""
-        queue_generator = mock.Mock()
-        builder = AssemblyLineBuilder(queue_generator, mock.Mock())
-        dummy_transformer = mock.Mock()
-        dummy_source = mock.Mock()
-        builder.source(dummy_source)
-
-        builder.into(dummy_transformer)
-
-        self.assertEqual(queue_generator(),
-                         builder.nodes[-1].input_stream._buffer_queue)
-
-    def test_into_returns_self(self):
-        """Tests into() returns the builder."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        builder.source(mock.Mock())
-
-        return_value = builder.into(mock.Mock())
-
-        self.assertEqual(return_value, builder)
-
-    def test_build_raises_if_already_built(self):
-        """Tests build() raises ValueError if build() was already called."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        builder.source(mock.Mock()).build()
-
-        with self.assertRaises(ValueError) as context:
-            builder.build()
-
-        self.assertIn('already built', context.exception.args[0])
-
-    def test_build_raises_if_no_source_has_been_set(self):
-        """Tests build() raises when there's nothing to build."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-
-        with self.assertRaises(ValueError) as context:
-            builder.build()
-
-        self.assertIn('empty', context.exception.args[0])
-
-    def test_build_properly_sets_output_stream(self):
-        """Tests build() passes the output_stream to the AssemblyLine."""
-        given_output_stream = 1
-
-        assembly_line_generator = mock.Mock()
-        builder = AssemblyLineBuilder(mock.Mock(), assembly_line_generator)
-        builder.source(mock.Mock())
-
-        builder.build(output_stream=given_output_stream)
-
-        self.assertEqual(
-            assembly_line_generator.call_args[0][0][-1].output_stream,
-            given_output_stream)
-
-    def test_build_generates_dev_null_stream_by_default(self):
-        """Tests build() uses DevNullBufferStream when no output_stream."""
-        assembly_line_generator = mock.Mock()
-        builder = AssemblyLineBuilder(mock.Mock(), assembly_line_generator)
-        builder.source(mock.Mock())
-
-        builder.build()
-
-        self.assertIsInstance(
-            assembly_line_generator.call_args[0][0][-1].output_stream,
-            DevNullBufferStream)
-
-
-class IndexedBufferTest(unittest.TestCase):
-    """Tests the IndexedBuffer class."""
-
-    def test_create_indexed_buffer_uses_existing_list(self):
-        my_list = [0, 1, 2, 3, 4, 5]
-        self.assertEqual(IndexedBuffer(0, my_list).buffer, my_list)
-
-    def test_create_indexed_buffer_creates_buffer_when_given_a_size(self):
-        buffer_len = 10
-        self.assertEqual(len(IndexedBuffer(0, buffer_len).buffer), buffer_len)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/calibration_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/calibration_test.py
deleted file mode 100755
index 9122d6b..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/calibration_test.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import statistics
-import unittest
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationScalars
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-
-# These values don't really matter.
-C = Channel.MAIN
-O = Origin.ZERO
-G = Granularity.FINE
-C2 = Channel.USB
-O2 = Origin.REFERENCE
-G2 = Granularity.COARSE
-
-
-class CalibrationWindowsTest(unittest.TestCase):
-    """Unit tests the CalibrationWindows class."""
-
-    def setUp(self):
-        # Here, we set up CalibrationWindows with a single dict entry so we can
-        # add values to the window. Normally, a child class is responsible for
-        # setting the keys of the CalibrationWindows object.
-        self.calibration_windows = CalibrationWindows(
-            calibration_window_size=5)
-        self.calibration_windows._calibrations[(C, O, G)] = deque()
-
-    def test_add_adds_new_value_to_end_of_window(self):
-        """Tests add() appends the new value to the end of the window."""
-        self.calibration_windows.add(C, O, G, 0)
-        self.calibration_windows.add(C, O, G, 1)
-        self.calibration_windows.add(C, O, G, 2)
-
-        expected_value = 3
-
-        self.calibration_windows.add(C, O, G, expected_value)
-
-        self.assertEqual(expected_value,
-                         self.calibration_windows._calibrations[(C, O, G)][-1])
-
-    def test_add_removes_stale_values(self):
-        """Tests add() removes values outside of the calibration window."""
-        value_to_remove = 0
-        new_values = range(1, 6)
-
-        self.calibration_windows.add(C, O, G, value_to_remove)
-        for new_value in new_values:
-            self.calibration_windows.add(C, O, G, new_value)
-
-        self.assertNotIn(value_to_remove,
-                         self.calibration_windows._calibrations[(C, O, G)])
-
-    def test_get_averages_items_within_window(self):
-        """tests get() returns the average of all values within the window."""
-        values = range(5)
-        expected_value = statistics.mean(values)
-
-        for value in values:
-            self.calibration_windows.add(C, O, G, value)
-
-        self.assertEqual(self.calibration_windows.get(C, O, G), expected_value)
-
-    def test_get_raises_error_when_calibration_is_not_complete(self):
-        """Tests get() raises CalibrationError when the window is not full."""
-        values = range(4)
-        for value in values:
-            self.calibration_windows.add(C, O, G, value)
-
-        with self.assertRaises(CalibrationError):
-            self.calibration_windows.get(C, O, G)
-
-
-class CalibrationScalarsTest(unittest.TestCase):
-    """Unit tests the CalibrationScalars class."""
-
-    def setUp(self):
-        # Here, we set up CalibrationScalars with a single dict entry so we can
-        # add values to the window. Normally, a child class is responsible for
-        # setting the keys of the CalibrationScalars object.
-        self.calibration_scalars = CalibrationScalars()
-        # Use a non-integer value so unit tests will fail when a bug occurs.
-        self.calibration_scalars._calibrations[(C, O, G)] = None
-
-    def test_get_returns_last_added_scalar(self):
-        """Tests the value added is the value returned from get()."""
-        ignored_value = 2.71828
-        expected_value = 3.14159
-
-        self.calibration_scalars.add(C, O, G, ignored_value)
-        self.calibration_scalars.add(C, O, G, expected_value)
-
-        self.assertEqual(expected_value, self.calibration_scalars.get(C, O, G))
-
-
-class CalibrationSnapshotTest(unittest.TestCase):
-    """Unit tests the CalibrationSnapshot class."""
-
-    def test_all_keys_are_copied_to_snapshot(self):
-        """Tests that all keys from passed-in collection are copied."""
-        base_calibration = CalibrationScalars()
-        base_calibration._calibrations = {
-            (C, O, G): 2.71828,
-            (C2, O2, G2): 3.14159,
-        }
-
-        calibration_snapshot = CalibrationSnapshot(base_calibration)
-
-        self.assertSetEqual(
-            set(base_calibration.get_keys()),
-            set(calibration_snapshot.get_keys()))
-
-    def test_init_raises_value_error_upon_illegal_arguments(self):
-        """Tests __init__() raises ValueError if the argument is invalid."""
-        with self.assertRaises(ValueError):
-            CalibrationSnapshot({'illegal': 'dictionary'})
-
-    def test_calibration_error_surfaced_on_get(self):
-        """Tests get() raises a CalibrationError if the snapshotted collection
-        had a CalibrationError.
-        """
-        base_calibration = CalibrationScalars()
-        base_calibration._calibrations = {
-            (C, O, G): CalibrationError('raise me')
-        }
-
-        calibration_snapshot = CalibrationSnapshot(base_calibration)
-
-        with self.assertRaises(CalibrationError):
-            calibration_snapshot.get(C, O, G)
-
-    def test_calibration_copied_upon_snapshot_created(self):
-        """Tests the calibration value is snapshotted."""
-        expected_value = 5
-        unexpected_value = 10
-        base_calibration = CalibrationScalars()
-        base_calibration._calibrations = {(C, O, G): expected_value}
-
-        calibration_snapshot = CalibrationSnapshot(base_calibration)
-        base_calibration.add(C, O, G, unexpected_value)
-
-        self.assertEqual(calibration_snapshot.get(C, O, G), expected_value)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformer_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformer_test.py
deleted file mode 100755
index dafa1e1..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformer_test.py
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-
-# The indexes of the arguments returned in Mock's call lists.
-ARGS = 0
-KWARGS = 1
-
-
-class TransformerImpl(Transformer):
-    """A basic implementation of a Transformer object."""
-
-    def __init__(self):
-        super().__init__()
-        self.actions = []
-
-    def on_begin(self):
-        self.actions.append('begin')
-
-    def on_end(self):
-        self.actions.append('end')
-
-    def _transform(self, _):
-        self.actions.append('transform')
-
-
-def raise_exception(tipe=Exception):
-    def exception_raiser():
-        raise tipe()
-
-    return exception_raiser
-
-
-class TransformerTest(unittest.TestCase):
-    """Tests the Transformer class."""
-
-    def test_transform_calls_functions_in_order(self):
-        """Tests transform() calls functions in the correct arrangement."""
-        my_transformer = TransformerImpl()
-
-        my_transformer.transform(mock.Mock())
-
-        self.assertEqual(['begin', 'transform', 'end'], my_transformer.actions)
-
-    def test_transform_initializes_input_stream(self):
-        """Tests transform() initializes the input_stream before beginning."""
-        input_stream = mock.Mock()
-        transformer = TransformerImpl()
-        # Purposely fail before sending any data
-        transformer.on_begin = raise_exception(Exception)
-
-        with self.assertRaises(Exception):
-            transformer.transform(input_stream)
-
-        # Asserts initialize was called before on_begin.
-        self.assertTrue(input_stream.initialize.called)
-
-    def test_transform_initializes_output_stream(self):
-        """Tests transform() initializes the output_stream before beginning."""
-        output_stream = mock.Mock()
-        transformer = TransformerImpl()
-        transformer.set_output_stream(output_stream)
-        # Purposely fail before sending any data
-        transformer.on_begin = raise_exception(Exception)
-
-        with self.assertRaises(Exception):
-            transformer.transform(mock.Mock())
-
-        # Asserts initialize was called before on_begin.
-        self.assertTrue(output_stream.initialize.called)
-
-
-class SourceTransformerTest(unittest.TestCase):
-    """Tests the SourceTransformer class."""
-
-    def test_transform_ends_on_buffer_stream_end(self):
-        """Tests transformation ends on stream end."""
-        source_transformer = SourceTransformer()
-        source_transformer.set_output_stream(mock.Mock())
-        transform_buffer = mock.Mock(side_effect=[BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        output_stream = mock.Mock()
-        source_transformer.transform(output_stream)
-
-        self.assertFalse(output_stream.add_indexed_buffer.called)
-
-    def test_transform_adds_transformed_index_buffer(self):
-        source_transformer = SourceTransformer()
-        output_stream = mock.Mock()
-        source_transformer.set_output_stream(output_stream)
-        expected_buffer = [0, 1, 2]
-        transform_buffer = mock.Mock(
-            side_effect=[expected_buffer, BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        source_transformer.transform(mock.Mock())
-
-        self.assertEqual(
-            expected_buffer,
-            output_stream.add_indexed_buffer.call_args[ARGS][0].buffer)
-
-    def test_transform_increases_buffer_index_each_call(self):
-        source_transformer = SourceTransformer()
-        output_stream = mock.Mock()
-        source_transformer.set_output_stream(output_stream)
-        buffer = [0, 1, 2]
-        transform_buffer = mock.Mock(
-            side_effect=[buffer, buffer, buffer, BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        source_transformer.transform(mock.Mock())
-
-        self.assertEqual([0, 1, 2], [
-            output_stream.add_indexed_buffer.call_args_list[i][ARGS][0].index
-            for i in range(output_stream.add_indexed_buffer.call_count)
-        ])
-
-    def test_transform_calls_end_stream(self):
-        source_transformer = SourceTransformer()
-        output_stream = mock.Mock()
-        source_transformer.set_output_stream(output_stream)
-        transform_buffer = mock.Mock(side_effect=[BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        source_transformer.transform(mock.Mock())
-
-        self.assertTrue(output_stream.end_stream.called)
-
-
-class SequentialTransformerTest(unittest.TestCase):
-    """Unit tests the SequentialTransformer class."""
-
-    def test_send_buffers_updates_next_index_on_buffer_list(self):
-        sequential_transformer = SequentialTransformer()
-        sequential_transformer._next_index = 10
-        expected_next_index = 15
-
-        sequential_transformer._send_buffers(BufferList([[]] * 5))
-
-        self.assertEqual(expected_next_index,
-                         sequential_transformer._next_index)
-
-    def test_send_buffers_updates_next_index_on_single_buffer(self):
-        sequential_transformer = SequentialTransformer()
-        sequential_transformer._next_index = 10
-        expected_next_index = 11
-
-        sequential_transformer._send_buffers([])
-
-        self.assertEqual(expected_next_index,
-                         sequential_transformer._next_index)
-
-    def test_send_buffers_sends_buffer_list_with_correct_indexes(self):
-        buffers_to_send = [
-            [1],
-            [1, 2],
-            [1, 2, 3],
-            [1, 2, 3, 4],
-            [1, 2, 3, 4, 5],
-        ]
-        sequential_transformer = SequentialTransformer()
-        output_stream = mock.Mock()
-        sequential_transformer.set_output_stream(output_stream)
-        sequential_transformer._send_buffers(BufferList(buffers_to_send))
-
-        for expected_index, expected_buffer in enumerate(buffers_to_send):
-            call = output_stream.add_indexed_buffer.call_args_list[
-                expected_index]
-            self.assertEqual(expected_index, call[ARGS][0].index)
-            self.assertEqual(expected_buffer, call[ARGS][0].buffer)
-
-    def test_transform_breaks_upon_buffer_stream_end_received(self):
-        sequential_transformer = SequentialTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        sequential_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        sequential_transformer._transform(input_stream)
-
-        self.assertFalse(output_stream.add_indexed_buffer.called)
-
-    def test_transform_closes_output_stream_when_finished(self):
-        sequential_transformer = SequentialTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        sequential_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        sequential_transformer._transform(input_stream)
-
-        self.assertTrue(output_stream.end_stream.called)
-
-
-class ParallelTransformerTest(unittest.TestCase):
-    """Unit tests the ParallelTransformer class."""
-
-    def test_transform_breaks_upon_buffer_stream_end_received(self):
-        parallel_transformer = ParallelTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        parallel_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        parallel_transformer._transform(input_stream)
-
-        self.assertFalse(output_stream.add_indexed_buffer.called)
-
-    def test_transform_closes_output_stream_when_finished(self):
-        parallel_transformer = ParallelTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        parallel_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        parallel_transformer._transform(input_stream)
-
-        self.assertTrue(output_stream.end_stream.called)
-
-    def test_transform_passes_indexed_buffer_with_updated_buffer(self):
-        expected_buffer = [0, 1, 2, 3, 4]
-        expected_index = 12345
-        parallel_transformer = ParallelTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        parallel_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [
-            IndexedBuffer(expected_index, []), BufferStream.END
-        ]
-        parallel_transformer._transform_buffer = lambda _: expected_buffer
-
-        parallel_transformer._transform(input_stream)
-
-        self.assertEqual(
-            expected_buffer,
-            output_stream.add_indexed_buffer.call_args_list[0][ARGS][0].buffer)
-        self.assertEqual(
-            expected_index,
-            output_stream.add_indexed_buffer.call_args_list[0][ARGS][0].index)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformers_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformers_test.py
deleted file mode 100755
index 56d34e3..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformers_test.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import statistics
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import PerfgateTee
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.hvpm.transformers import HvpmReading
-
-ARGS = 0
-KWARGS = 1
-
-
-class TeeTest(unittest.TestCase):
-    """Unit tests the transformers.Tee class."""
-
-    @mock.patch('builtins.open')
-    def test_begin_opens_file_on_expected_filename(self, open_mock):
-        expected_filename = 'foo'
-
-        Tee(expected_filename).on_begin()
-
-        open_mock.assert_called_with(expected_filename, 'w+')
-
-    @mock.patch('builtins.open')
-    def test_end_closes_file(self, open_mock):
-        tee = Tee('foo')
-        tee.on_begin()
-
-        tee.on_end()
-
-        self.assertTrue(open_mock().close.called)
-
-    @mock.patch('builtins.open')
-    def test_transform_buffer_outputs_correct_format(self, open_mock):
-        tee = Tee('foo')
-        tee.on_begin()
-
-        expected_output = [
-            '0.010000000 1.414213562370\n', '0.020000000 2.718281828460\n',
-            '0.030000000 3.141592653590\n'
-        ]
-
-        tee._transform_buffer([
-            HvpmReading([1.41421356237, 0, 0, 0, 0], 0.01),
-            HvpmReading([2.71828182846, 0, 0, 0, 0], 0.02),
-            HvpmReading([3.14159265359, 0, 0, 0, 0], 0.03),
-        ])
-
-        for call, out in zip(open_mock().write.call_args_list,
-                             expected_output):
-            self.assertEqual(call[ARGS][0], out)
-
-
-class PerfgateTeeTest(unittest.TestCase):
-    """Unit tests the transformers.PerfgateTee class."""
-
-    @mock.patch('builtins.open')
-    def test_begin_opens_file_on_expected_filename(self, open_mock):
-        expected_filename = 'foo'
-
-        PerfgateTee(expected_filename).on_begin()
-
-        open_mock.assert_called_with(expected_filename, 'w+')
-
-    @mock.patch('builtins.open')
-    def test_end_closes_file(self, open_mock):
-        tee = PerfgateTee('foo')
-        tee.on_begin()
-
-        tee.on_end()
-
-        self.assertTrue(open_mock().close.called)
-
-    @mock.patch('builtins.open')
-    def test_transform_buffer_outputs_correct_format(self, open_mock):
-        tee = PerfgateTee('foo')
-        tee.on_begin()
-
-        expected_output = [
-            '1596149635552503296,0.000223,4.193050\n',
-            '1596149635562476032,0.000212,4.193190\n',
-            '1596149635572549376,0.000225,4.193135\n',
-        ]
-
-        tee._transform_buffer([
-            HvpmReading([0.000223, 0, 0, 4.193050, 0], 1596149635.552503296),
-            HvpmReading([0.000212, 0, 0, 4.193190, 0], 1596149635.562476032),
-            HvpmReading([0.000225, 0, 0, 4.193135, 0], 1596149635.572549376),
-        ])
-
-        for call, out in zip(open_mock().write.call_args_list,
-                             expected_output):
-            self.assertEqual(call[ARGS][0], out)
-
-
-class SampleAggregatorTest(unittest.TestCase):
-    """Unit tests the transformers.SampleAggregator class."""
-
-    def test_transform_buffer_respects_start_after_seconds_flag(self):
-        sample_aggregator = SampleAggregator(start_after_seconds=1.0)
-        sample_aggregator._transform_buffer([
-            HvpmReading([1.41421356237, 0, 0, 0, 0], 0.00),
-            HvpmReading([2.71828182846, 0, 0, 0, 0], 0.99),
-            HvpmReading([3.14159265359, 0, 0, 0, 0], 1.00),
-        ])
-
-        self.assertEqual(sample_aggregator.num_samples, 1)
-        self.assertEqual(sample_aggregator.sum_currents, 3.14159265359)
-
-    def test_transform_buffer_sums_currents(self):
-        sample_aggregator = SampleAggregator()
-        sample_aggregator._transform_buffer([
-            HvpmReading([1.41421356237, 0, 0, 0, 0], 0.01),
-            HvpmReading([2.71828182846, 0, 0, 0, 0], 0.99),
-            HvpmReading([3.14159265359, 0, 0, 0, 0], 1.00),
-        ])
-
-        self.assertEqual(sample_aggregator.num_samples, 3)
-        self.assertAlmostEqual(sample_aggregator.sum_currents, 7.27408804442)
-
-
-class DownSamplerTest(unittest.TestCase):
-    """Unit tests the DownSampler class."""
-
-    def test_transform_buffer_downsamples_without_leftovers(self):
-        downsampler = DownSampler(2)
-        buffer = [
-            HvpmReading([2, 0, 0, 0, 0], .01),
-            HvpmReading([4, 0, 0, 0, 0], .03),
-            HvpmReading([6, 0, 0, 0, 0], .05),
-            HvpmReading([8, 0, 0, 0, 0], .07),
-            HvpmReading([10, 0, 0, 0, 0], .09),
-            HvpmReading([12, 0, 0, 0, 0], .011),
-        ]
-
-        values = downsampler._transform_buffer(buffer)
-
-        self.assertEqual(len(values), len(buffer) / 2)
-        for i, down_sample in enumerate(values):
-            self.assertAlmostEqual(
-                down_sample.main_current,
-                ((buffer[2 * i] + buffer[2 * i + 1]) / 2).main_current)
-
-    def test_transform_stores_unused_values_in_leftovers(self):
-        downsampler = DownSampler(3)
-        buffer = [
-            HvpmReading([2, 0, 0, 0, 0], .01),
-            HvpmReading([4, 0, 0, 0, 0], .03),
-            HvpmReading([6, 0, 0, 0, 0], .05),
-            HvpmReading([8, 0, 0, 0, 0], .07),
-            HvpmReading([10, 0, 0, 0, 0], .09),
-        ]
-
-        downsampler._transform_buffer(buffer)
-
-        self.assertEqual(len(downsampler._leftovers), 2)
-        self.assertIn(buffer[-2], downsampler._leftovers)
-        self.assertIn(buffer[-1], downsampler._leftovers)
-
-    def test_transform_uses_leftovers_on_next_calculation(self):
-        downsampler = DownSampler(3)
-        starting_leftovers = [
-            HvpmReading([2, 0, 0, 0, 0], .01),
-            HvpmReading([4, 0, 0, 0, 0], .03),
-        ]
-        downsampler._leftovers = starting_leftovers
-        buffer = [
-            HvpmReading([6, 0, 0, 0, 0], .05),
-            HvpmReading([8, 0, 0, 0, 0], .07),
-            HvpmReading([10, 0, 0, 0, 0], .09),
-            HvpmReading([12, 0, 0, 0, 0], .011)
-        ]
-
-        values = downsampler._transform_buffer(buffer)
-
-        self.assertEqual(len(values), 2)
-        self.assertNotIn(starting_leftovers[0], downsampler._leftovers)
-        self.assertNotIn(starting_leftovers[1], downsampler._leftovers)
-
-        self.assertAlmostEqual(
-            values[0].main_current,
-            statistics.mean([
-                starting_leftovers[0].main_current,
-                starting_leftovers[1].main_current,
-                buffer[0].main_current,
-            ]))
-        self.assertAlmostEqual(
-            values[1].main_current,
-            statistics.mean([
-                buffer[1].main_current,
-                buffer[2].main_current,
-                buffer[3].main_current,
-            ]))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_test.py
deleted file mode 100755
index 858ac3a..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_test.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers import monsoon
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon as HvpmMonsoon
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import Monsoon as LvpmStockMonsoon
-
-
-@mock.patch('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.MonsoonProxy')
-@mock.patch('antlion.controllers.monsoon_lib.api.hvpm.monsoon.HVPM')
-class MonsoonTest(unittest.TestCase):
-    """Tests the antlion.controllers.iperf_client module functions."""
-    def test_create_can_create_lvpm_from_id_only(self, *_):
-        monsoons = monsoon.create([12345])
-        self.assertIsInstance(monsoons[0], LvpmStockMonsoon)
-
-    def test_create_can_create_lvpm_from_dict(self, *_):
-        monsoons = monsoon.create([{'type': 'LvpmStockMonsoon', 'serial': 10}])
-        self.assertIsInstance(monsoons[0], LvpmStockMonsoon)
-        self.assertEqual(monsoons[0].serial, 10)
-
-    def test_create_can_create_hvpm_from_id_only(self, *_):
-        monsoons = monsoon.create([23456])
-        self.assertIsInstance(monsoons[0], HvpmMonsoon)
-
-    def test_create_can_create_hvpm_from_dict(self, *_):
-        monsoons = monsoon.create([{'type': 'HvpmMonsoon', 'serial': 10}])
-        self.assertIsInstance(monsoons[0], HvpmMonsoon)
-        self.assertEqual(monsoons[0].serial, 10)
-
-    def test_raises_error_if_monsoon_type_is_unknown(self, *_):
-        with self.assertRaises(ValueError):
-            monsoon.create([{'type': 'UNKNOWN', 'serial': 10}])
-
-    def test_raises_error_if_monsoon_serial_not_provided(self, *_):
-        with self.assertRaises(ValueError):
-            monsoon.create([{'type': 'LvpmStockMonsoon'}])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py b/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
index 3cd0b85..e5ee543 100644
--- a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
+++ b/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
@@ -22,85 +22,82 @@
 from antlion.controllers.pdu_lib.synaccess.np02b import _TNHelperNP02B, PduDevice
 
 # Test Constants
-HOST = '192.168.1.2'
-VALID_COMMAND_STR = 'cmd'
-VALID_COMMAND_BYTE_STR = b'cmd\n\r'
-VALID_RESPONSE_STR = ''
-VALID_RESPONSE_BYTE_STR = b'\n\r\r\n\r\n'
-STATUS_COMMAND_STR = 'pshow'
-STATUS_COMMAND_BYTE_STR = b'pshow\n\r'
+HOST = "192.168.1.2"
+VALID_COMMAND_STR = "cmd"
+VALID_COMMAND_BYTE_STR = b"cmd\n\r"
+VALID_RESPONSE_STR = ""
+VALID_RESPONSE_BYTE_STR = b"\n\r\r\n\r\n"
+STATUS_COMMAND_STR = "pshow"
+STATUS_COMMAND_BYTE_STR = b"pshow\n\r"
 STATUS_RESPONSE_STR = (
-    'Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   ON |'
+    "Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   ON |"
 )
 STATUS_RESPONSE_BYTE_STR = (
-    b'Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   '
-    b'ON |\n\r\r\n\r\n')
-INVALID_COMMAND_OUTPUT_BYTE_STR = b'Invalid Command\n\r\r\n\r\n>'
-VALID_STATUS_DICT = {'1': False, '2': True}
-INVALID_STATUS_DICT = {'1': False, '2': False}
+    b"Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   "
+    b"ON |\n\r\r\n\r\n"
+)
+INVALID_COMMAND_OUTPUT_BYTE_STR = b"Invalid Command\n\r\r\n\r\n>"
+VALID_STATUS_DICT = {"1": False, "2": True}
+INVALID_STATUS_DICT = {"1": False, "2": False}
 
 
 class _TNHelperNP02BTest(unittest.TestCase):
     """Unit tests for _TNHelperNP02B."""
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
     def test_cmd_is_properly_written(self, telnetlib_mock, sleep_mock):
         """cmd should strip whitespace and encode in ASCII."""
         tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            VALID_RESPONSE_BYTE_STR)
+        telnetlib_mock.Telnet().read_until.return_value = VALID_RESPONSE_BYTE_STR
         res = tnhelper.cmd(VALID_COMMAND_STR)
-        telnetlib_mock.Telnet().write.assert_called_with(
-            VALID_COMMAND_BYTE_STR)
+        telnetlib_mock.Telnet().write.assert_called_with(VALID_COMMAND_BYTE_STR)
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
     def test_cmd_valid_command_output_is_properly_parsed(
-            self, telnetlib_mock, sleep_mock):
+        self, telnetlib_mock, sleep_mock
+    ):
         """cmd should strip the prompt, separators and command from the
         output."""
         tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            VALID_RESPONSE_BYTE_STR)
+        telnetlib_mock.Telnet().read_until.return_value = VALID_RESPONSE_BYTE_STR
         res = tnhelper.cmd(VALID_COMMAND_STR)
         self.assertEqual(res, VALID_RESPONSE_STR)
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
-    def test_cmd_status_output_is_properly_parsed(self, telnetlib_mock,
-                                                  sleep_mock):
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
+    def test_cmd_status_output_is_properly_parsed(self, telnetlib_mock, sleep_mock):
         """cmd should strip the prompt, separators and command from the output,
         returning just the status information."""
         tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            STATUS_RESPONSE_BYTE_STR)
+        telnetlib_mock.Telnet().read_until.return_value = STATUS_RESPONSE_BYTE_STR
         res = tnhelper.cmd(STATUS_COMMAND_STR)
         self.assertEqual(res, STATUS_RESPONSE_STR)
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
-    def test_cmd_invalid_command_raises_error(self, telnetlib_mock,
-                                              sleep_mock):
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
+    def test_cmd_invalid_command_raises_error(self, telnetlib_mock, sleep_mock):
         """cmd should raise PduError when an invalid command is given."""
         tnhelper = _TNHelperNP02B(HOST)
         telnetlib_mock.Telnet().read_until.return_value = (
-            INVALID_COMMAND_OUTPUT_BYTE_STR)
+            INVALID_COMMAND_OUTPUT_BYTE_STR
+        )
         with self.assertRaises(PduError):
-            res = tnhelper.cmd('Some invalid command.')
+            res = tnhelper.cmd("Some invalid command.")
 
 
 class NP02BPduDeviceTest(unittest.TestCase):
     """Unit tests for NP02B PduDevice implementation."""
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd')
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
     def test_status_parses_output_to_valid_dictionary(self, tnhelper_cmd_mock):
         """status should parse helper response correctly into dict."""
         np02b = PduDevice(HOST, None, None)
         tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
         self.assertEqual(np02b.status(), VALID_STATUS_DICT)
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd')
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
     def test_verify_state_matches_state(self, tnhelper_cmd_mock):
         """verify_state should return true when expected state is a subset of
         actual state"""
@@ -108,8 +105,8 @@
         tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
         self.assertTrue(np02b._verify_state(VALID_STATUS_DICT))
 
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd')
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
     def test_verify_state_throws_error(self, tnhelper_cmd_mock, time_mock):
         """verify_state should throw error after timeout when actual state never
         reaches expected state"""
@@ -120,5 +117,5 @@
             self.assertTrue(np02b._verify_state(INVALID_STATUS_DICT))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/power_metrics_test.py b/src/antlion/unit_tests/controllers/power_metrics_test.py
deleted file mode 100644
index ba47b2f..0000000
--- a/src/antlion/unit_tests/controllers/power_metrics_test.py
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import statistics
-import unittest
-from unittest import mock
-from unittest.mock import patch
-
-from antlion.controllers import power_metrics
-from antlion.controllers.power_metrics import CURRENT
-from antlion.controllers.power_metrics import END_TIMESTAMP
-from antlion.controllers.power_metrics import HOUR
-from antlion.controllers.power_metrics import Metric
-from antlion.controllers.power_metrics import MILLIAMP
-from antlion.controllers.power_metrics import MINUTE
-from antlion.controllers.power_metrics import PowerMetrics
-from antlion.controllers.power_metrics import START_TIMESTAMP
-from antlion.controllers.power_metrics import TIME
-from antlion.controllers.power_metrics import WATT
-
-FAKE_UNIT_TYPE = 'fake_unit'
-FAKE_UNIT = 'F'
-
-
-class MeasurementTest(unittest.TestCase):
-    """Unit tests for the Measurement class."""
-
-    def test_init_with_valid_unit_type(self):
-        """Test that a Measurement is properly initialized given a valid unit
-        type.
-        """
-        measurement = Metric(2, CURRENT, MILLIAMP)
-        self.assertEqual(measurement.value, 2)
-        self.assertEqual(measurement.unit, MILLIAMP)
-
-    def test_init_with_invalid_unit_type(self):
-        """Test that __init__ raises an error if given an invalid unit type."""
-        with self.assertRaisesRegex(TypeError, 'valid unit type'):
-            measurement = Metric(2, FAKE_UNIT_TYPE, FAKE_UNIT)
-
-    def test_unit_conversion(self):
-        """Test that to_unit correctly converts value and unit."""
-        ratio = 1000
-        current_amps = Metric.amps(15)
-        current_milliamps = current_amps.to_unit(MILLIAMP)
-        self.assertEqual(current_milliamps.value / current_amps.value, ratio)
-
-    def test_unit_conversion_with_wrong_type(self):
-        """Test that to_unit raises and error if incompatible unit type is
-        specified.
-        """
-        current_amps = Metric.amps(3.4)
-        with self.assertRaisesRegex(TypeError, 'Incompatible units'):
-            power_watts = current_amps.to_unit(WATT)
-
-    def test_comparison_operators(self):
-        """Test that the comparison operators work as intended."""
-        # time_a == time_b < time_c
-        time_a = Metric.seconds(120)
-        time_b = Metric(2, TIME, MINUTE)
-        time_c = Metric(0.1, TIME, HOUR)
-
-        self.assertEqual(time_a, time_b)
-        self.assertEqual(time_b, time_a)
-        self.assertLessEqual(time_a, time_b)
-        self.assertGreaterEqual(time_a, time_b)
-
-        self.assertNotEqual(time_a, time_c)
-        self.assertNotEqual(time_c, time_a)
-        self.assertLess(time_a, time_c)
-        self.assertLessEqual(time_a, time_c)
-        self.assertGreater(time_c, time_a)
-        self.assertGreaterEqual(time_c, time_a)
-
-    def test_arithmetic_operators(self):
-        """Test that the addition and subtraction operators work as intended"""
-        time_a = Metric(3, TIME, HOUR)
-        time_b = Metric(90, TIME, MINUTE)
-
-        sum_ = time_a + time_b
-        self.assertEqual(sum_.value, 4.5)
-        self.assertEqual(sum_.unit, HOUR)
-
-        sum_reversed = time_b + time_a
-        self.assertEqual(sum_reversed.value, 270)
-        self.assertEqual(sum_reversed.unit, MINUTE)
-
-        diff = time_a - time_b
-        self.assertEqual(diff.value, 1.5)
-        self.assertEqual(diff.unit, HOUR)
-
-        diff_reversed = time_b - time_a
-        self.assertEqual(diff_reversed.value, -90)
-        self.assertEqual(diff_reversed.unit, MINUTE)
-
-
-class PowerMetricsTest(unittest.TestCase):
-    """Unit tests for the PowerMetrics class."""
-
-    SAMPLES = [0.13, 0.95, 0.32, 4.84, 2.48, 4.11, 4.85, 4.88, 4.22, 2.2]
-    RAW_DATA = list(zip(range(10), SAMPLES))
-    VOLTAGE = 4.2
-
-    def setUp(self):
-        self.power_metrics = PowerMetrics(self.VOLTAGE)
-
-    def test_import_raw_data(self):
-        """Test that power metrics can be loaded from file. Simply ensure that
-        the number of samples is correct."""
-
-        imported_data = power_metrics.import_raw_data(
-            os.path.join(os.path.dirname(__file__),
-                         'data/sample_monsoon_data')
-        )
-
-        count = 0
-        for _, __ in imported_data:
-            count = count + 1
-        self.assertEqual(count, 10)
-
-    @patch('antlion.controllers.power_metrics.PowerMetrics')
-    def test_split_by_test_with_timestamps(self, mock_power_metric_type):
-        """Test that given test timestamps, a power metric is generated from
-        a subset of samples corresponding to the test."""
-        timestamps = {'sample_test': {START_TIMESTAMP: 3500,
-                                      END_TIMESTAMP:   8500}}
-
-        mock_power_metric = mock.Mock()
-        mock_power_metric_type.side_effect = lambda v: mock_power_metric
-        power_metrics.generate_test_metrics(self.RAW_DATA,
-                                            timestamps=timestamps,
-                                            voltage=self.VOLTAGE)
-
-        self.assertEqual(mock_power_metric.update_metrics.call_count, 5)
-
-    def test_incomplete_timestamps_are_ignored(self):
-        """Test that given incomplete timestamps, a power metric is generated from
-        a subset of samples corresponding to the test."""
-        sample_test = 'sample_test'
-        test_end = 13500
-        test_timestamps = {sample_test: {
-            END_TIMESTAMP: test_end}}
-        # no error expected
-        metrics = (
-            power_metrics.generate_test_metrics(self.RAW_DATA,
-                                                timestamps=test_timestamps,
-                                                voltage=self.VOLTAGE))
-
-
-    def test_numeric_metrics(self):
-        """Test that the numeric metrics have correct values."""
-        timestamps = {'sample_test': {START_TIMESTAMP: 0,
-                                      END_TIMESTAMP:   10000}}
-        metrics = power_metrics.generate_test_metrics(self.RAW_DATA,
-                                                      timestamps=timestamps,
-                                                      voltage=self.VOLTAGE)
-        metrics_as_dic = {m.name: m for m in metrics['sample_test']}
-        self.assertAlmostEqual(metrics_as_dic['avg_current'].value,
-                               statistics.mean(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(metrics_as_dic['max_current'].value,
-                               max(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(metrics_as_dic['min_current'].value,
-                               min(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(
-            metrics_as_dic['stdev_current'].value,
-            statistics.stdev(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(
-            self.power_metrics.avg_power.value,
-            self.power_metrics.avg_current.value * self.VOLTAGE)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/power_monitor_test.py b/src/antlion/unit_tests/controllers/power_monitor_test.py
deleted file mode 100644
index f62e6fa..0000000
--- a/src/antlion/unit_tests/controllers/power_monitor_test.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from antlion.controllers import power_monitor
-
-
-class PowerMonitorTest(unittest.TestCase):
-
-    def test_registry_gets_updated(self):
-        power_monitor.update_registry({'a_key': 'a_value'})
-        self.assertTrue('a_key' in power_monitor.get_registry())
-        self.assertEqual('a_value', power_monitor.get_registry()['a_key'])
-
-    def test_registry_disallows_mutation(self):
-        power_monitor.update_registry({'a_key': 'a_value'})
-        with self.assertRaises(power_monitor.ResourcesRegistryError):
-            power_monitor.update_registry({'a_key': 'another_value'})
-
-    def test_registry_allows_updates_with_same_values(self):
-        power_monitor.update_registry({'a_key': 'a_value'})
-        power_monitor.update_registry({'a_key': 'a_value'})
-
-    def tearDown(self):
-        super().tearDown()
-        power_monitor._REGISTRY = {}
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/__init__.py b/src/antlion/unit_tests/controllers/rohdeschwarz_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/contest_test.py b/src/antlion/unit_tests/controllers/rohdeschwarz_lib/contest_test.py
deleted file mode 100644
index 05d57d7..0000000
--- a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/contest_test.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion import asserts
-import unittest
-from unittest import mock
-import socket
-import time
-from contextlib import closing
-
-# TODO(markdr): Remove this hack after adding zeep to setup.py.
-import sys
-sys.modules['zeep'] = mock.Mock()
-
-from antlion.controllers.rohdeschwarz_lib import contest
-
-
-def find_free_port():
-    """ Helper function to find a free port.
-    https://stackoverflow.com/a/45690594
-    """
-    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
-        s.bind(('', 0))
-        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-        return s.getsockname()[1]
-
-
-class ContestTest(unittest.TestCase):
-    """ Unit tests for the contest controller."""
-
-    LOCAL_HOST_IP = '127.0.0.1'
-
-    @classmethod
-    def setUpClass(self):
-        self.log = logger.create_tagged_trace_logger('contest_test')
-
-    def test_automation_server_end_to_end(self):
-        """ End to end test for the Contest object's ability to start an
-        Automation Server and respond to the commands sent through the
-        socket interface. """
-
-        automation_port = find_free_port()
-
-        # Instantiate the mock Contest object. This will start a thread in the
-        # background running the Automation server.
-        with mock.patch('zeep.client.Client') as zeep_client:
-
-            # Create a MagicMock instance
-            zeep_client.return_value = mock.MagicMock()
-
-            controller = contest.Contest(
-                logger=self.log,
-                remote_ip=None,
-                remote_port=None,
-                automation_listen_ip=self.LOCAL_HOST_IP,
-                automation_port=automation_port,
-                dut_on_func=None,
-                dut_off_func=None,
-                ftp_pwd=None,
-                ftp_usr=None)
-
-            # Give some time for the server to initialize as it's running on
-            # a different thread.
-            time.sleep(0.01)
-
-            # Start a socket connection and send a command
-            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
-                s.connect((self.LOCAL_HOST_IP, automation_port))
-                s.sendall(b'AtTestcaseStart')
-                s.settimeout(1.0)
-                data = s.recv(1024)
-                asserts.assert_true(data == b'OK\n', "Received OK response.")
-
-        controller.destroy()
-
-    def test_automation_protocol_calls_dut_off_func_for_on_command(self):
-        """ Tests the AutomationProtocol's ability to turn the DUT off
-        upon receiving the requests."""
-
-        dut_on_func = mock.Mock()
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), dut_on_func, mock.Mock())
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'DUT_SWITCH_ON')
-        asserts.assert_true(dut_on_func.called, 'Function was not called.')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_calls_dut_on_func_for_off_command(self):
-        """ Tests the Automation server's ability to turn the DUT on
-        upon receiving the requests."""
-
-        dut_off_func = mock.Mock()
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), dut_off_func)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'DUT_SWITCH_OFF')
-        asserts.assert_true(dut_off_func.called, 'Function was not called.')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testcase_start_command(self):
-        """ Tests the Automation server's ability to handle a testcase start
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AtTestcaseStart name_of_the_testcase')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testplan_start_command(self):
-        """ Tests the Automation server's ability to handle a testplan start
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AtTestplanStart')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testcase_end_command(self):
-        """ Tests the Automation server's ability to handle a testcase end
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AfterTestcase')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testplan_end_command(self):
-        """ Tests the Automation server's ability to handle a testplan start
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AfterTestplan')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    # Makes all time.sleep commands call a mock function that returns
-    # immediately, rather than sleeping.
-    @mock.patch('time.sleep')
-    # Prevents the controller to try to download the results from the FTP server
-    @mock.patch('antlion.controllers.rohdeschwarz_lib.contest'
-                '.Contest.pull_test_results')
-    def test_execute_testplan_stops_reading_output_on_exit_line(
-            self, time_mock, results_func_mock):
-        """ Makes sure that execute_test plan returns after receiving an
-        exit code.
-
-        Args:
-            time_mock: time.sleep mock object.
-            results_func_mock: Contest.pull_test_results mock object.
-        """
-
-        service_output = mock.Mock()
-        # An array of what return values. If a value is an Exception, the
-        # Exception is raised instead.
-        service_output.side_effect = [
-            'Output line 1\n', 'Output line 2\n',
-            'Testplan Directory: \\\\a\\b\\c\n'
-            'Exit code: 0\n',
-            AssertionError('Tried to read output after exit code was sent.')
-        ]
-
-        with mock.patch('zeep.client.Client') as zeep_client:
-            zeep_client.return_value.service.DoGetOutput = service_output
-            controller = contest.Contest(logger=self.log,
-                                         remote_ip=None,
-                                         remote_port=None,
-                                         automation_listen_ip=None,
-                                         automation_port=None,
-                                         dut_on_func=None,
-                                         dut_off_func=None,
-                                         ftp_usr=None,
-                                         ftp_pwd=None)
-
-        controller.execute_testplan('TestPlan')
-        controller.destroy()
-
-    # Makes all time.sleep commands call a mock function that returns
-    # immediately, rather than sleeping.
-    @mock.patch('time.sleep')
-    # Prevents the controller to try to download the results from the FTP server
-    @mock.patch.object(contest.Contest, 'pull_test_results')
-    def test_execute_testplan_detects_results_directory(
-            self, time_mock, results_func_mock):
-        """ Makes sure that execute_test is able to detect the testplan
-        directory from the test output.
-
-        Args:
-            time_mock: time.sleep mock object.
-            results_func_mock: Contest.pull_test_results mock object.
-        """
-
-        results_directory = 'results\directory\\name'
-
-        service_output = mock.Mock()
-        # An array of what return values. If a value is an Exception, the
-        # Exception is raised instead.
-        service_output.side_effect = [
-            'Testplan Directory: {}{}\\ \n'.format(contest.Contest.FTP_ROOT,
-                                                   results_directory),
-            'Exit code: 0\n'
-        ]
-
-        with mock.patch('zeep.client.Client') as zeep_client:
-            zeep_client.return_value.service.DoGetOutput = service_output
-            controller = contest.Contest(logger=self.log,
-                                         remote_ip=None,
-                                         remote_port=None,
-                                         automation_listen_ip=None,
-                                         automation_port=None,
-                                         dut_on_func=None,
-                                         dut_off_func=None,
-                                         ftp_usr=None,
-                                         ftp_pwd=None)
-
-        controller.execute_testplan('TestPlan')
-
-        controller.pull_test_results.assert_called_with(results_directory)
-        controller.destroy()
-
-    # Makes all time.sleep commands call a mock function that returns
-    # immediately, rather than sleeping.
-    @mock.patch('time.sleep')
-    # Prevents the controller to try to download the results from the FTP server
-    @mock.patch.object(contest.Contest, 'pull_test_results')
-    def test_execute_testplan_fails_when_contest_is_unresponsive(
-            self, time_mock, results_func_mock):
-        """ Makes sure that execute_test plan returns after receiving an
-        exit code.
-
-        Args:
-            time_mock: time.sleep mock object.
-            results_func_mock: Contest.pull_test_results mock object.
-        """
-
-        service_output = mock.Mock()
-        # An array of what return values. If a value is an Exception, the
-        # Exception is raised instead.
-        mock_output = [None] * contest.Contest.MAXIMUM_OUTPUT_READ_RETRIES
-        mock_output.append(
-            AssertionError('Test did not failed after too many '
-                           'unsuccessful retries.'))
-        service_output.side_effect = mock_output
-
-        with mock.patch('zeep.client.Client') as zeep_client:
-            zeep_client.return_value.service.DoGetOutput = service_output
-            controller = contest.Contest(logger=self.log,
-                                         remote_ip=None,
-                                         remote_port=None,
-                                         automation_listen_ip=None,
-                                         automation_port=None,
-                                         dut_on_func=None,
-                                         dut_off_func=None,
-                                         ftp_usr=None,
-                                         ftp_pwd=None)
-
-        try:
-            controller.execute_testplan('TestPlan')
-        except RuntimeError:
-            pass
-
-        controller.destroy()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
index 1714233..c5f49ac 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
+++ b/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
@@ -35,9 +35,12 @@
         """
         session = mock.Mock()
 
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
         client._log = mock.Mock()
         client._working_connections = [mock.Mock()]
 
@@ -52,9 +55,12 @@
         """
         session = mock.Mock()
 
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
         client._log = mock.Mock()
         working_connections = [mock.Mock() for _ in range(3)]
         free_connections = [mock.Mock() for _ in range(3)]
@@ -73,20 +79,25 @@
         """
 
         def fail_on_wrong_execution():
-            self.fail('The program is not executing the expected path. '
-                      'Tried to return an available free client, ended up '
-                      'sleeping to wait for client instead.')
+            self.fail(
+                "The program is not executing the expected path. "
+                "Tried to return an available free client, ended up "
+                "sleeping to wait for client instead."
+            )
 
         session = mock.Mock()
 
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
         expected_connection = mock.Mock()
         client._free_connections = [expected_connection]
         client._lock = mock.MagicMock()
 
-        with mock.patch('time.sleep') as sleep_mock:
+        with mock.patch("time.sleep") as sleep_mock:
             sleep_mock.side_effect = fail_on_wrong_execution
 
             connection = client._get_free_connection()
@@ -106,31 +117,38 @@
             client._free_connections.clear()
 
         def fail_on_wrong_execution():
-            self.fail('The program is not executing the expected path. '
-                      'Tried to return an available free client, ended up '
-                      'sleeping to wait for client instead.')
+            self.fail(
+                "The program is not executing the expected path. "
+                "Tried to return an available free client, ended up "
+                "sleeping to wait for client instead."
+            )
 
         session = mock.Mock()
 
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
         client._free_connections = mock.Mock()
         client._lock = mock.MagicMock()
         client._lock.acquire.side_effect = empty_list
         client._free_connections = [mock.Mock()]
 
-        with mock.patch('time.sleep') as sleep_mock:
+        with mock.patch("time.sleep") as sleep_mock:
             sleep_mock.side_effect = fail_on_wrong_execution
 
             try:
                 client._get_free_connection()
             except IndexError:
-                self.fail('Tried to pop free connection when another thread'
-                          'has taken it.')
+                self.fail(
+                    "Tried to pop free connection when another thread" "has taken it."
+                )
         # Assert that the lock has been freed.
-        self.assertEqual(client._lock.acquire.call_count,
-                         client._lock.release.call_count)
+        self.assertEqual(
+            client._lock.acquire.call_count, client._lock.release.call_count
+        )
 
     def test_get_free_connection_sleep(self):
         """Tests rpc_client.RpcClient._get_free_connection().
@@ -140,29 +158,33 @@
 
         session = mock.Mock()
 
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
         client._free_connections = []
         client.max_connections = 0
         client._lock = mock.MagicMock()
         client._free_connections = []
 
-        with mock.patch('time.sleep') as sleep_mock:
+        with mock.patch("time.sleep") as sleep_mock:
             sleep_mock.side_effect = BreakoutError()
             try:
                 client._get_free_connection()
             except BreakoutError:
                 # Assert that the lock has been freed.
-                self.assertEqual(client._lock.acquire.call_count,
-                                 client._lock.release.call_count)
+                self.assertEqual(
+                    client._lock.acquire.call_count, client._lock.release.call_count
+                )
                 # Asserts that the sleep has been called.
                 self.assertTrue(sleep_mock.called)
                 # Asserts that no changes to connections happened
                 self.assertEqual(len(client._free_connections), 0)
                 self.assertEqual(len(client._working_connections), 0)
                 return True
-        self.fail('Failed to hit sleep case')
+        self.fail("Failed to hit sleep case")
 
     def test_release_working_connection(self):
         """Tests rpc_client.RpcClient._release_working_connection.
@@ -170,9 +192,12 @@
         Tests that the working connection is moved into the free connections.
         """
         session = mock.Mock()
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
 
         connection = mock.Mock()
         client._working_connections = [connection]
@@ -183,13 +208,14 @@
         self.assertFalse(connection in client._working_connections)
 
     def test_future(self):
-        """Tests rpc_client.RpcClient.future.
-
-        """
+        """Tests rpc_client.RpcClient.future."""
         session = mock.Mock()
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
 
         self.assertEqual(client.future, client._async_client)
 
@@ -199,20 +225,24 @@
         Tests that the name, args, and kwargs are correctly passed to self.rpc.
         """
         session = mock.Mock()
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
         client.rpc = mock.MagicMock()
         fn = client.fake_function_please_do_not_be_implemented
 
-        fn('arg1', 'arg2', kwarg1=1, kwarg2=2)
+        fn("arg1", "arg2", kwarg1=1, kwarg2=2)
         client.rpc.assert_called_with(
-            'fake_function_please_do_not_be_implemented',
-            'arg1',
-            'arg2',
+            "fake_function_please_do_not_be_implemented",
+            "arg1",
+            "arg2",
             kwarg1=1,
-            kwarg2=2)
+            kwarg2=2,
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
index 35a3425..d76368f 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
+++ b/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
@@ -44,8 +44,7 @@
     """
 
     @staticmethod
-    def mock_rpc_connection(response=MOCK_RESP,
-                            uid=rpc_connection.UNKNOWN_UID):
+    def mock_rpc_connection(response=MOCK_RESP, uid=rpc_connection.UNKNOWN_UID):
         """Sets up a faked socket file from the mock connection."""
         fake_file = MockSocketFile(response)
         fake_conn = mock.MagicMock()
@@ -53,8 +52,7 @@
         adb = mock.Mock()
         ports = mock.Mock()
 
-        return rpc_connection.RpcConnection(
-            adb, ports, fake_conn, fake_file, uid=uid)
+        return rpc_connection.RpcConnection(adb, ports, fake_conn, fake_file, uid=uid)
 
     def test_open_chooses_init_on_unknown_uid(self):
         """Tests rpc_connection.RpcConnection.open().
@@ -64,8 +62,7 @@
 
         def pass_on_init(start_command):
             if not start_command == rpc_connection.Sl4aConnectionCommand.INIT:
-                self.fail(
-                    'Must call "init". Called "%s" instead.' % start_command)
+                self.fail('Must call "init". Called "%s" instead.' % start_command)
 
         connection = self.mock_rpc_connection()
         connection._initiate_handshake = pass_on_init
@@ -79,8 +76,7 @@
 
         def pass_on_continue(start_command):
             if start_command != rpc_connection.Sl4aConnectionCommand.CONTINUE:
-                self.fail('Must call "continue". Called "%s" instead.' %
-                          start_command)
+                self.fail('Must call "continue". Called "%s" instead.' % start_command)
 
         connection = self.mock_rpc_connection(uid=1)
         connection._initiate_handshake = pass_on_continue
@@ -93,8 +89,7 @@
         has the correct parameters.
         """
         connection = self.mock_rpc_connection()
-        connection._initiate_handshake(
-            rpc_connection.Sl4aConnectionCommand.INIT)
+        connection._initiate_handshake(rpc_connection.Sl4aConnectionCommand.INIT)
 
         self.assertEqual(connection.uid, 1)
 
@@ -105,8 +100,7 @@
         will not be given a uid.
         """
         connection = self.mock_rpc_connection(MOCK_RESP_UNKNOWN_UID)
-        connection._initiate_handshake(
-            rpc_connection.Sl4aConnectionCommand.INIT)
+        connection._initiate_handshake(rpc_connection.Sl4aConnectionCommand.INIT)
 
         self.assertEqual(connection.uid, rpc_client.UNKNOWN_UID)
 
@@ -116,13 +110,13 @@
         Test that if a handshake receives no response then it will give a
         protocol error.
         """
-        connection = self.mock_rpc_connection(b'')
+        connection = self.mock_rpc_connection(b"")
 
         with self.assertRaises(
-                rpc_client.Sl4aProtocolError,
-                msg=rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE):
-            connection._initiate_handshake(
-                rpc_connection.Sl4aConnectionCommand.INIT)
+            rpc_client.Sl4aProtocolError,
+            msg=rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE,
+        ):
+            connection._initiate_handshake(rpc_connection.Sl4aConnectionCommand.INIT)
 
     def test_cmd_properly_formatted(self):
         """Tests rpc_connection.RpcConnection._cmd().
@@ -130,10 +124,11 @@
         Tests that the command sent is properly formatted.
         """
         connection = self.mock_rpc_connection(MOCK_RESP)
-        connection._cmd('test')
+        connection._cmd("test")
         self.assertIn(
             connection._socket_file.last_write,
-            [b'{"cmd": "test", "uid": -1}\n', b'{"uid": -1, "cmd": "test"}\n'])
+            [b'{"cmd": "test", "uid": -1}\n', b'{"uid": -1, "cmd": "test"}\n'],
+        )
 
     def test_get_new_ticket(self):
         """Tests rpc_connection.RpcConnection.get_new_ticket().
@@ -141,8 +136,7 @@
         Tests that a new number is always given for get_new_ticket().
         """
         connection = self.mock_rpc_connection(MOCK_RESP)
-        self.assertEqual(connection.get_new_ticket() + 1,
-                         connection.get_new_ticket())
+        self.assertEqual(connection.get_new_ticket() + 1, connection.get_new_ticket())
 
 
 if __name__ == "__main__":
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
index 459f9a7..783cee2 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
+++ b/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
@@ -33,7 +33,7 @@
         Tests that a new Sl4aManager is returned without an error.
         """
         adb = mock.Mock()
-        adb.serial = 'SERIAL'
+        adb.serial = "SERIAL"
         sl4a_man = sl4a_manager.create_sl4a_manager(adb)
         self.assertEqual(sl4a_man.adb, adb)
 
@@ -44,11 +44,11 @@
         new Sl4aManager, and returns the first created Sl4aManager instead.
         """
         adb = mock.Mock()
-        adb.serial = 'SERIAL'
+        adb.serial = "SERIAL"
         first_manager = sl4a_manager.create_sl4a_manager(adb)
 
         adb_same_serial = mock.Mock()
-        adb_same_serial.serial = 'SERIAL'
+        adb_same_serial.serial = "SERIAL"
         second_manager = sl4a_manager.create_sl4a_manager(adb)
 
         self.assertEqual(first_manager, second_manager)
@@ -60,11 +60,11 @@
         each device gets its own Sl4aManager object.
         """
         adb_1 = mock.Mock()
-        adb_1.serial = 'SERIAL'
+        adb_1.serial = "SERIAL"
         first_manager = sl4a_manager.create_sl4a_manager(adb_1)
 
         adb_2 = mock.Mock()
-        adb_2.serial = 'DIFFERENT_SERIAL_NUMBER'
+        adb_2.serial = "DIFFERENT_SERIAL_NUMBER"
         second_manager = sl4a_manager.create_sl4a_manager(adb_2)
 
         self.assertNotEqual(first_manager, second_manager)
@@ -72,48 +72,39 @@
 
 class Sl4aManagerTest(unittest.TestCase):
     """Tests the sl4a_manager.Sl4aManager class."""
-    ATTEMPT_INTERVAL = .25
+
+    ATTEMPT_INTERVAL = 0.25
     MAX_WAIT_ON_SERVER_SECONDS = 1
-    _SL4A_LAUNCH_SERVER_CMD = ''
-    _SL4A_CLOSE_SERVER_CMD = ''
-    _SL4A_ROOT_FIND_PORT_CMD = ''
-    _SL4A_USER_FIND_PORT_CMD = ''
-    _SL4A_START_SERVICE_CMD = ''
+    _SL4A_LAUNCH_SERVER_CMD = ""
+    _SL4A_CLOSE_SERVER_CMD = ""
+    _SL4A_ROOT_FIND_PORT_CMD = ""
+    _SL4A_USER_FIND_PORT_CMD = ""
+    _SL4A_START_SERVICE_CMD = ""
 
     @classmethod
     def setUpClass(cls):
         # Copy all module constants before testing begins.
-        Sl4aManagerTest.ATTEMPT_INTERVAL = \
-            sl4a_manager.ATTEMPT_INTERVAL
-        Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS = \
+        Sl4aManagerTest.ATTEMPT_INTERVAL = sl4a_manager.ATTEMPT_INTERVAL
+        Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS = (
             sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS
-        Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD = \
-            sl4a_manager._SL4A_LAUNCH_SERVER_CMD
-        Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD = \
-            sl4a_manager._SL4A_CLOSE_SERVER_CMD
-        Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD = \
-            sl4a_manager._SL4A_ROOT_FIND_PORT_CMD
-        Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD = \
-            sl4a_manager._SL4A_USER_FIND_PORT_CMD
-        Sl4aManagerTest._SL4A_START_SERVICE_CMD = \
-            sl4a_manager._SL4A_START_SERVICE_CMD
+        )
+        Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD = sl4a_manager._SL4A_LAUNCH_SERVER_CMD
+        Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD = sl4a_manager._SL4A_CLOSE_SERVER_CMD
+        Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD = sl4a_manager._SL4A_ROOT_FIND_PORT_CMD
+        Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD = sl4a_manager._SL4A_USER_FIND_PORT_CMD
+        Sl4aManagerTest._SL4A_START_SERVICE_CMD = sl4a_manager._SL4A_START_SERVICE_CMD
 
     def setUp(self):
         # Restore all module constants at the beginning of each test case.
-        sl4a_manager.ATTEMPT_INTERVAL = \
-            Sl4aManagerTest.ATTEMPT_INTERVAL
-        sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS = \
+        sl4a_manager.ATTEMPT_INTERVAL = Sl4aManagerTest.ATTEMPT_INTERVAL
+        sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS = (
             Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS
-        sl4a_manager._SL4A_LAUNCH_SERVER_CMD = \
-            Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD
-        sl4a_manager._SL4A_CLOSE_SERVER_CMD = \
-            Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD
-        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = \
-            Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD
-        sl4a_manager._SL4A_USER_FIND_PORT_CMD = \
-            Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD
-        sl4a_manager._SL4A_START_SERVICE_CMD = \
-            Sl4aManagerTest._SL4A_START_SERVICE_CMD
+        )
+        sl4a_manager._SL4A_LAUNCH_SERVER_CMD = Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD
+        sl4a_manager._SL4A_CLOSE_SERVER_CMD = Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD
+        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD
+        sl4a_manager._SL4A_USER_FIND_PORT_CMD = Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD
+        sl4a_manager._SL4A_START_SERVICE_CMD = Sl4aManagerTest._SL4A_START_SERVICE_CMD
 
         # Reset module data at the beginning of each test.
         sl4a_manager._all_sl4a_managers = {}
@@ -148,7 +139,7 @@
         # One call for each session
         self.assertSetEqual(set(returned_ports), {12345, 15973, 67890, 75638})
 
-    @mock.patch('time.sleep', return_value=None)
+    @mock.patch("time.sleep", return_value=None)
     def test_start_sl4a_server_uses_all_retries(self, _):
         """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
 
@@ -156,12 +147,14 @@
         a port.
         """
         adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: ''
+        adb.shell = lambda _, **kwargs: ""
 
         side_effects = []
         expected_port = 12345
-        for _ in range(int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS /
-                           sl4a_manager.ATTEMPT_INTERVAL) - 1):
+        for _ in range(
+            int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS / sl4a_manager.ATTEMPT_INTERVAL)
+            - 1
+        ):
             side_effects.append(None)
         side_effects.append(expected_port)
 
@@ -171,9 +164,9 @@
             found_port = manager.start_sl4a_server(0)
             self.assertTrue(found_port)
         except rpc_client.Sl4aConnectionError:
-            self.fail('start_sl4a_server failed to respect FIND_PORT_RETRIES.')
+            self.fail("start_sl4a_server failed to respect FIND_PORT_RETRIES.")
 
-    @mock.patch('time.sleep', return_value=None)
+    @mock.patch("time.sleep", return_value=None)
     def test_start_sl4a_server_fails_all_retries(self, _):
         """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
 
@@ -181,18 +174,19 @@
         fail.
         """
         adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: ''
+        adb.shell = lambda _, **kwargs: ""
 
         side_effects = []
-        for _ in range(int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS /
-                           sl4a_manager.ATTEMPT_INTERVAL)):
+        for _ in range(
+            int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS / sl4a_manager.ATTEMPT_INTERVAL)
+        ):
             side_effects.append(None)
 
         manager = sl4a_manager.create_sl4a_manager(adb)
         manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
         try:
             manager.start_sl4a_server(0)
-            self.fail('Sl4aConnectionError was not thrown.')
+            self.fail("Sl4aConnectionError was not thrown.")
         except rpc_client.Sl4aConnectionError:
             pass
 
@@ -204,7 +198,7 @@
         """
         adb = mock.Mock()
         adb.is_root = lambda: True
-        command = 'ngo45hke3b4vie3mv5ni93,vfu3j'
+        command = "ngo45hke3b4vie3mv5ni93,vfu3j"
         sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
 
         manager = sl4a_manager.create_sl4a_manager(adb)
@@ -219,7 +213,7 @@
         adb = mock.Mock()
         adb.is_root = lambda: False
         adb.ensure_root = lambda: True
-        command = 'ngo45hke3b4vie3mv5ni93,vfu3j'
+        command = "ngo45hke3b4vie3mv5ni93,vfu3j"
         sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
 
         manager = sl4a_manager.create_sl4a_manager(adb)
@@ -234,7 +228,7 @@
         adb = mock.Mock()
         adb.is_root = lambda: False
         adb.ensure_root = lambda: False
-        command = 'ngo45hke3b4vie3mv5ni93,vfu3j'
+        command = "ngo45hke3b4vie3mv5ni93,vfu3j"
         sl4a_manager._SL4A_USER_FIND_PORT_CMD = command
 
         manager = sl4a_manager.create_sl4a_manager(adb)
@@ -246,7 +240,7 @@
         Tests to ensure None is returned if no open port is found.
         """
         adb = mock.Mock()
-        adb.shell = lambda _: ''
+        adb.shell = lambda _: ""
 
         manager = sl4a_manager.create_sl4a_manager(adb)
         self.assertIsNone(manager._get_open_listening_port())
@@ -258,10 +252,10 @@
         marked as in use.
         """
         adb = mock.Mock()
-        adb.shell = lambda _: '12345 67890'
+        adb.shell = lambda _: "12345 67890"
 
         manager = sl4a_manager.create_sl4a_manager(adb)
-        manager._sl4a_ports = {'12345', '67890'}
+        manager._sl4a_ports = {"12345", "67890"}
         self.assertIsNone(manager._get_open_listening_port())
 
     def test_get_open_listening_port_port_is_avaiable(self):
@@ -271,10 +265,10 @@
         marked as used.
         """
         adb = mock.Mock()
-        adb.shell = lambda _: '12345 67890'
+        adb.shell = lambda _: "12345 67890"
 
         manager = sl4a_manager.create_sl4a_manager(adb)
-        manager._sl4a_ports = {'12345'}
+        manager._sl4a_ports = {"12345"}
         self.assertEqual(manager._get_open_listening_port(), 67890)
 
     def test_is_sl4a_installed_is_true(self):
@@ -283,7 +277,7 @@
         Tests is_sl4a_installed() returns true when pm returns data
         """
         adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: 'asdf'
+        adb.shell = lambda _, **kwargs: "asdf"
         manager = sl4a_manager.create_sl4a_manager(adb)
         self.assertTrue(manager.is_sl4a_installed())
 
@@ -293,7 +287,7 @@
         Tests is_sl4a_installed() returns false when pm returns no data
         """
         adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: ''
+        adb.shell = lambda _, **kwargs: ""
         manager = sl4a_manager.create_sl4a_manager(adb)
         self.assertFalse(manager.is_sl4a_installed())
 
@@ -308,7 +302,7 @@
         manager.is_sl4a_installed = lambda: False
         try:
             manager.start_sl4a_service()
-            self.fail('An error should have been thrown.')
+            self.fail("An error should have been thrown.")
         except rpc_client.Sl4aNotInstalledError:
             pass
 
@@ -318,14 +312,14 @@
         Tests that SL4A is started if it was not already running.
         """
         adb = mock.Mock()
-        adb.shell = mock.Mock(side_effect=['', '', ''])
+        adb.shell = mock.Mock(side_effect=["", "", ""])
 
         manager = sl4a_manager.create_sl4a_manager(adb)
         manager.is_sl4a_installed = lambda: True
         try:
             manager.start_sl4a_service()
         except rpc_client.Sl4aNotInstalledError:
-            self.fail('An error should not have been thrown.')
+            self.fail("An error should not have been thrown.")
         adb.shell.assert_called_with(sl4a_manager._SL4A_START_SERVICE_CMD)
 
     def test_create_session_uses_oldest_server_port(self):
@@ -352,8 +346,7 @@
         manager.sessions[1] = session_1
         manager.sessions[2] = session_2
 
-        with mock.patch.object(
-                rpc_client.RpcClient, '__init__', return_value=None):
+        with mock.patch.object(rpc_client.RpcClient, "__init__", return_value=None):
             created_session = manager.create_session()
 
         self.assertEqual(created_session.server_port, session_1.server_port)
@@ -370,8 +363,7 @@
         # Ignore starting SL4A.
         manager.start_sl4a_service = lambda: None
 
-        with mock.patch.object(
-                rpc_client.RpcClient, '__init__', return_value=None):
+        with mock.patch.object(rpc_client.RpcClient, "__init__", return_value=None):
             created_session = manager.create_session()
 
         self.assertEqual(created_session.server_port, 0)
@@ -402,11 +394,9 @@
         manager._get_all_ports = lambda: []
         manager.terminate_all_sessions()
         # No duplicates calls to terminate.
-        self.assertEqual(
-            len(called_terminate_on), len(set(called_terminate_on)))
+        self.assertEqual(len(called_terminate_on), len(set(called_terminate_on)))
         # One call for each session
-        self.assertSetEqual(
-            set(called_terminate_on), {session_1, session_4, session_5})
+        self.assertSetEqual(set(called_terminate_on), {session_1, session_4, session_5})
 
     def test_terminate_all_session_close_each_server(self):
         """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
@@ -418,12 +408,12 @@
         def close(command):
             if str.isdigit(command):
                 closed_ports.append(command)
-            return ''
+            return ""
 
         adb = mock.Mock()
         adb.shell = close
-        sl4a_manager._SL4A_CLOSE_SERVER_CMD = '%s'
-        ports_to_close = {'12345', '67890', '24680', '13579'}
+        sl4a_manager._SL4A_CLOSE_SERVER_CMD = "%s"
+        ports_to_close = {"12345", "67890", "24680", "13579"}
 
         manager = sl4a_manager.Sl4aManager(adb)
         manager._sl4a_ports = set(ports_to_close)
@@ -449,10 +439,10 @@
         self.assertTrue(manager.start_sl4a_server.called)
 
     @mock.patch(
-        'antlion.controllers.sl4a_lib.sl4a_manager.Sl4aManager.sl4a_ports_in_use',
-        new_callable=mock.PropertyMock)
-    def test_obtain_sl4a_server_returns_existing_server(
-            self, sl4a_ports_in_use):
+        "antlion.controllers.sl4a_lib.sl4a_manager.Sl4aManager.sl4a_ports_in_use",
+        new_callable=mock.PropertyMock,
+    )
+    def test_obtain_sl4a_server_returns_existing_server(self, sl4a_ports_in_use):
         """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
 
         Tests that an existing server is returned if it is already opened.
@@ -468,5 +458,5 @@
         self.assertEqual(12345, ret)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
index efa7073..e812313 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
+++ b/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
@@ -50,7 +50,7 @@
         session.is_alive = Sl4aSession.is_alive
         self.assertNotEqual(session._terminated, session.is_alive)
 
-    @patch('antlion.controllers.sl4a_lib.event_dispatcher.EventDispatcher')
+    @patch("antlion.controllers.sl4a_lib.event_dispatcher.EventDispatcher")
     def test_get_event_dispatcher_create_on_none(self, _):
         """Tests Sl4aSession.get_event_dispatcher.
 
@@ -68,7 +68,7 @@
         Tests that the existing event_dispatcher is returned.
         """
         session = mock.Mock()
-        session._event_dispatcher = 'Something that is not None'
+        session._event_dispatcher = "Something that is not None"
         ed = Sl4aSession.get_event_dispatcher(session)
         self.assertEqual(session._event_dispatcher, ed)
 
@@ -80,7 +80,7 @@
         """
         session = mock.Mock()
         session._create_client_side_connection = mock.Mock()
-        with mock.patch('socket.socket') as socket:
+        with mock.patch("socket.socket") as socket:
             # Throw an error when trying to bind to the hinted port.
             error = OSError()
             error.errno = errno.EADDRINUSE
@@ -90,7 +90,8 @@
             socket.return_value = socket_instance
 
             Sl4aSession._create_client_side_connection(
-                session, sl4a_ports.Sl4aPorts(1, 2, 3))
+                session, sl4a_ports.Sl4aPorts(1, 2, 3)
+            )
 
         fn = session._create_client_side_connection
         self.assertEqual(fn.call_count, 1)
@@ -106,7 +107,7 @@
         session = mock.Mock()
         session._create_client_side_connection = mock.Mock()
         error = timeout()
-        with mock.patch('socket.socket') as socket:
+        with mock.patch("socket.socket") as socket:
             # Throw an error when trying to bind to the hinted port.
             socket_instance = mock.Mock()
             socket_instance.connect = mock.Mock()
@@ -115,7 +116,8 @@
 
             with self.assertRaises(rpc_client.Sl4aConnectionError):
                 Sl4aSession._create_client_side_connection(
-                    session, sl4a_ports.Sl4aPorts(0, 2, 3))
+                    session, sl4a_ports.Sl4aPorts(0, 2, 3)
+                )
 
     def test_create_client_side_connection_hint_taken_during_fn(self):
         """Tests Sl4aSession._create_client_side_connection().
@@ -127,7 +129,7 @@
         session._create_client_side_connection = mock.Mock()
         error = socket_error()
         error.errno = errno.EADDRNOTAVAIL
-        with mock.patch('socket.socket') as socket:
+        with mock.patch("socket.socket") as socket:
             # Throw an error when trying to bind to the hinted port.
             socket_instance = mock.Mock()
             socket_instance.connect = mock.Mock()
@@ -135,7 +137,8 @@
             socket.return_value = socket_instance
 
             Sl4aSession._create_client_side_connection(
-                session, sl4a_ports.Sl4aPorts(0, 2, 3))
+                session, sl4a_ports.Sl4aPorts(0, 2, 3)
+            )
 
         fn = session._create_client_side_connection
         self.assertEqual(fn.call_count, 1)
@@ -154,7 +157,7 @@
         error = socket_error()
         # Some error that isn't EADDRNOTAVAIL
         error.errno = errno.ESOCKTNOSUPPORT
-        with mock.patch('socket.socket') as socket:
+        with mock.patch("socket.socket") as socket:
             # Throw an error when trying to bind to the hinted port.
             socket_instance = mock.Mock()
             socket_instance.connect = mock.Mock()
@@ -163,7 +166,8 @@
 
             with self.assertRaises(socket_error):
                 Sl4aSession._create_client_side_connection(
-                    session, sl4a_ports.Sl4aPorts(0, 2, 3))
+                    session, sl4a_ports.Sl4aPorts(0, 2, 3)
+                )
 
     def test_terminate_only_closes_if_not_terminated(self):
         """Tests Sl4aSession.terminate()
@@ -202,9 +206,9 @@
         mock_session.adb = mock_adb
         mock_session.log = mock.Mock()
 
-        self.assertEqual(8080,
-                         Sl4aSession._create_forwarded_port(
-                             mock_session, 9999, 8080))
+        self.assertEqual(
+            8080, Sl4aSession._create_forwarded_port(mock_session, 9999, 8080)
+        )
 
     def test_create_forwarded_port_fail_once(self):
         """Tests that _create_forwarded_port can return a non-hinted port.
@@ -215,23 +219,26 @@
         mock_adb.get_version_number = lambda: 37
 
         mock_adb.tcp_forward = mock.Mock(
-            side_effect=AdbError('cmd', 'stdout', stderr='cannot bind listener',
-                                 ret_code=1))
+            side_effect=AdbError(
+                "cmd", "stdout", stderr="cannot bind listener", ret_code=1
+            )
+        )
         mock_session = mock.MagicMock()
         mock_session.adb = mock_adb
         mock_session.log = mock.Mock()
         mock_session._create_forwarded_port = lambda *args, **kwargs: 12345
 
-        self.assertEqual(12345,
-                         Sl4aSession._create_forwarded_port(mock_session, 9999,
-                                                            8080))
+        self.assertEqual(
+            12345, Sl4aSession._create_forwarded_port(mock_session, 9999, 8080)
+        )
 
     def test_create_forwarded_port_raises_if_adb_version_is_old(self):
         """Tests that _create_forwarded_port raises if adb version < 37."""
         mock_adb = mock.Mock()
         mock_adb.get_version_number = lambda: 31
         mock_adb.tcp_forward = lambda _, __: self.fail(
-            'Calling adb.tcp_forward despite ADB version being too old.')
+            "Calling adb.tcp_forward despite ADB version being too old."
+        )
         mock_session = mock.Mock()
         mock_session.adb = mock_adb
         mock_session.log = mock.Mock()
@@ -239,5 +246,5 @@
             Sl4aSession._create_forwarded_port(mock_session, 9999, 0)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/event/decorators_test.py b/src/antlion/unit_tests/event/decorators_test.py
index 2cb1dfc..078e013 100755
--- a/src/antlion/unit_tests/event/decorators_test.py
+++ b/src/antlion/unit_tests/event/decorators_test.py
@@ -65,7 +65,7 @@
     def test_subscribe_calling_the_function_returns_normally(self):
         """tests that functions decorated by subscribe can be called."""
         dummy_class = DecoratorsTest.DummyClass()
-        self.assertEqual(dummy_class.test(''), dummy_class.mock)
+        self.assertEqual(dummy_class.test(""), dummy_class.mock)
 
 
 class DummyEvent(Event):
@@ -73,13 +73,15 @@
 
 
 class RegisterStaticSubscriptionsTest(TestCase):
-
     def test_register_static_subscriptions_returns_passed_in_object(self):
         obj = Mock()
         returned_value = register_static_subscriptions(obj)
-        self.assertEqual(obj, returned_value,
-                         'register_static_subscriptions returned a value other'
-                         'than the object passed in.')
+        self.assertEqual(
+            obj,
+            returned_value,
+            "register_static_subscriptions returned a value other"
+            "than the object passed in.",
+        )
 
     def test_register_static_subscriptions_registers_properly(self):
         @register_static_subscriptions
@@ -94,21 +96,26 @@
         event = DummyEvent()
         event_bus.post(event)
 
-        self.assertEqual(event, RegisterStaticSubscriptionsClass.captured_event,
-                         'register_static_subscriptions did not subscribe '
-                         'RegisterStaticSubscriptionsClass.on_static_event.')
+        self.assertEqual(
+            event,
+            RegisterStaticSubscriptionsClass.captured_event,
+            "register_static_subscriptions did not subscribe "
+            "RegisterStaticSubscriptionsClass.on_static_event.",
+        )
 
 
 class RegisterInstanceSubscriptionsTest(TestCase):
-
     def test_register_instance_subscriptions_returns_passed_in_object(self):
         class SomeClass(object):
             pass
 
         returned_value = register_instance_subscriptions(SomeClass)
-        self.assertEqual(SomeClass, returned_value,
-                         'register_instance_subscriptions returned a value '
-                         'other than the object passed in.')
+        self.assertEqual(
+            SomeClass,
+            returned_value,
+            "register_instance_subscriptions returned a value "
+            "other than the object passed in.",
+        )
 
     def test_register_instance_subscriptions_registers_properly(self):
         @register_instance_subscriptions
@@ -125,10 +132,12 @@
         event_bus.post(event)
 
         self.assertEqual(
-            event, instance.captured_event,
-            'register_instance_subscriptions did not subscribe the instance '
-            'function RegisterInstanceSubscriptionsClass.on_instance_event.')
+            event,
+            instance.captured_event,
+            "register_instance_subscriptions did not subscribe the instance "
+            "function RegisterInstanceSubscriptionsClass.on_instance_event.",
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/event/event_bus_integration_test.py b/src/antlion/unit_tests/event/event_bus_integration_test.py
index 04f5e20..18ba573 100755
--- a/src/antlion/unit_tests/event/event_bus_integration_test.py
+++ b/src/antlion/unit_tests/event/event_bus_integration_test.py
@@ -32,8 +32,9 @@
 
     def __init__(self, configs):
         import mock
+
         self.log = mock.Mock()
-        with mock.patch('mobly.utils.create_dir'):
+        with mock.patch("mobly.utils.create_dir"):
             super().__init__(configs)
 
     @subscribe(Event)
@@ -51,6 +52,7 @@
 
 class EventBusIntegrationTest(TestCase):
     """Tests the EventBus E2E."""
+
     def setUp(self):
         """Clears the event bus of all state."""
         self.called_event = False
@@ -62,10 +64,10 @@
         """Tests that TestClasses have their subscribed functions called."""
         with tempfile.TemporaryDirectory() as tmp_dir:
             test_run_config = mobly_config_parser.TestRunConfig()
-            test_run_config.testbed_name = 'SampleTestBed'
+            test_run_config.testbed_name = "SampleTestBed"
             test_run_config.log_path = tmp_dir
 
-            TestRunner(test_run_config, [('TestClass', [])]).run(TestClass)
+            TestRunner(test_run_config, [("TestClass", [])]).run(TestClass)
 
         self.assertGreaterEqual(len(TestClass.instance_event_received), 1)
         self.assertEqual(len(TestClass.static_event_received), 0)
@@ -83,8 +85,8 @@
     def test_subscribe_instance_bundles(self):
         """Tests that @subscribe bundles register only instance listeners."""
         test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = ''
-        test_run_config.log_path = ''
+        test_run_config.testbed_name = ""
+        test_run_config.log_path = ""
         test_object = TestClass(test_run_config)
         bundle = subscription_bundle.create_from_instance(test_object)
         bundle.register()
@@ -96,6 +98,7 @@
 
     def test_event_register(self):
         """Tests that event.register()'d functions can receive posted Events."""
+
         def event_listener(_):
             self.called_event = True
 
@@ -106,6 +109,7 @@
 
     def test_event_unregister(self):
         """Tests that an event can be registered, and then unregistered."""
+
         def event_listener(_):
             self.called_event = False
 
@@ -116,5 +120,5 @@
         self.assertFalse(self.called_event)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/event/event_bus_test.py b/src/antlion/unit_tests/event/event_bus_test.py
index 2352ea7..5df19b8 100755
--- a/src/antlion/unit_tests/event/event_bus_test.py
+++ b/src/antlion/unit_tests/event/event_bus_test.py
@@ -35,7 +35,7 @@
         """Gets the subscription argument from a register_subscription call."""
         return register_subscription_call[0][0]
 
-    @patch('antlion.event.event_bus._event_bus.register_subscription')
+    @patch("antlion.event.event_bus._event_bus.register_subscription")
     def test_register_registers_a_subscription(self, register_subscription):
         """Tests that register creates and registers a subscription."""
         mock_event = Mock()
@@ -52,7 +52,7 @@
         self.assertEqual(subscription._func, mock_func)
         self.assertEqual(subscription.order, order)
 
-    @patch('antlion.event.event_bus._event_bus.register_subscription')
+    @patch("antlion.event.event_bus._event_bus.register_subscription")
     def test_register_subscriptions_for_list(self, register_subscription):
         """Tests that register_subscription is called for each subscription."""
         mocks = [Mock(), Mock(), Mock()]
@@ -86,9 +86,7 @@
         """Tests that the event_bus can register an existing event type."""
         mock_type = Mock()
         bus = event_bus._event_bus
-        bus._subscriptions[mock_type] = [
-            EventSubscription(mock_type, lambda _: None)
-        ]
+        bus._subscriptions[mock_type] = [EventSubscription(mock_type, lambda _: None)]
         new_subscription = EventSubscription(mock_type, lambda _: True)
 
         reg_id = event_bus.register_subscription(new_subscription)
@@ -143,7 +141,7 @@
         for subscription in mock_subscriptions:
             subscription.deliver.assert_called_once_with(mock_event)
 
-    @patch('antlion.event.event_bus._event_bus.unregister')
+    @patch("antlion.event.event_bus._event_bus.unregister")
     def test_unregister_all_from_list(self, unregister):
         """Tests unregistering from a list unregisters the specified list."""
         unregister_list = [Mock(), Mock()]
@@ -155,7 +153,7 @@
             subscription = args[0]
             self.assertTrue(subscription in unregister_list)
 
-    @patch('antlion.event.event_bus._event_bus.unregister')
+    @patch("antlion.event.event_bus._event_bus.unregister")
     def test_unregister_all_from_event(self, unregister):
         """Tests that all subscriptions under the event are unregistered."""
         mock_event = Mock()
@@ -176,7 +174,7 @@
             subscription = args[0]
             self.assertTrue(subscription in unregister_list)
 
-    @patch('antlion.event.event_bus._event_bus.unregister')
+    @patch("antlion.event.event_bus._event_bus.unregister")
     def test_unregister_all_no_args_unregisters_everything(self, unregister):
         """Tests unregister_all without arguments will unregister everything."""
         mock_event_1 = Mock()
@@ -193,12 +191,14 @@
 
         event_bus.unregister_all()
 
-        self.assertEqual(unregister.call_count,
-                         len(unregister_list_1) + len(unregister_list_2))
+        self.assertEqual(
+            unregister.call_count, len(unregister_list_1) + len(unregister_list_2)
+        )
         for args, _ in unregister.call_args_list:
             subscription = args[0]
-            self.assertTrue(subscription in unregister_list_1
-                            or subscription in unregister_list_2)
+            self.assertTrue(
+                subscription in unregister_list_1 or subscription in unregister_list_2
+            )
 
     def test_unregister_given_an_event_subscription(self):
         """Tests that unregister can unregister a given EventSubscription."""
@@ -212,8 +212,7 @@
 
         self.assertTrue(val)
         self.assertTrue(subscription not in bus._registration_id_map)
-        self.assertTrue(
-            subscription not in bus._subscriptions[type(mock_event)])
+        self.assertTrue(subscription not in bus._subscriptions[type(mock_event)])
 
     def test_unregister_given_a_registration_id(self):
         """Tests that unregister can unregister a given EventSubscription."""
@@ -228,8 +227,7 @@
 
         self.assertTrue(val)
         self.assertTrue(subscription not in bus._registration_id_map)
-        self.assertTrue(
-            subscription not in bus._subscriptions[type(mock_event)])
+        self.assertTrue(subscription not in bus._subscriptions[type(mock_event)])
 
     def test_unregister_given_object_that_is_not_a_subscription(self):
         """Asserts that a ValueError is raised upon invalid arguments."""
@@ -252,8 +250,7 @@
             self.assertEqual(len(bus._registration_id_map), 1)
 
     def test_listen_for_unregisters_listener(self):
-        """Tests listen_for unregisters the listener after the with statement.
-        """
+        """Tests listen_for unregisters the listener after the with statement."""
         bus = event_bus._event_bus
 
         def event_listener(_):
@@ -265,5 +262,5 @@
         self.assertEqual(len(bus._registration_id_map), 0)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/event/event_subscription_test.py b/src/antlion/unit_tests/event/event_subscription_test.py
index 1bdeb8f..3c4a008 100755
--- a/src/antlion/unit_tests/event/event_subscription_test.py
+++ b/src/antlion/unit_tests/event/event_subscription_test.py
@@ -41,8 +41,9 @@
     def test_deliver_dont_deliver_if_event_is_filtered(self):
         """Tests deliver does not call func if the event is filtered out."""
         func = Mock()
-        subscription = EventSubscription(Mock(), func,
-                                         event_filter=self.filter_out_event)
+        subscription = EventSubscription(
+            Mock(), func, event_filter=self.filter_out_event
+        )
 
         subscription.deliver(Mock())
 
@@ -51,12 +52,11 @@
     def test_deliver_deliver_accepted_event(self):
         """Tests deliver does call func when the event is accepted."""
         func = Mock()
-        subscription = EventSubscription(Mock(), func,
-                                         event_filter=self.pass_filter)
+        subscription = EventSubscription(Mock(), func, event_filter=self.pass_filter)
 
         subscription.deliver(Mock())
         self.assertTrue(func.called)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/event/subscription_bundle_test.py b/src/antlion/unit_tests/event/subscription_bundle_test.py
index 654c26e..48d6fe0 100755
--- a/src/antlion/unit_tests/event/subscription_bundle_test.py
+++ b/src/antlion/unit_tests/event/subscription_bundle_test.py
@@ -47,9 +47,10 @@
         self.assertEqual(subscription._event_filter, event_filter)
         self.assertEqual(subscription.order, order)
 
-    @patch('antlion.event.event_bus.register_subscription')
+    @patch("antlion.event.event_bus.register_subscription")
     def test_add_subscription_registers_sub_if_package_is_registered(
-            self, register_subscription):
+        self, register_subscription
+    ):
         """Tests that add_subscription registers the subscription if the
         SubscriptionBundle is already registered."""
         package = SubscriptionBundle()
@@ -81,7 +82,7 @@
 
         self.assertTrue(mock_subscription not in package.subscriptions.keys())
 
-    @patch('antlion.event.event_bus.unregister')
+    @patch("antlion.event.event_bus.unregister")
     def test_remove_subscription_unregisters_subscription(self, unregister):
         """Tests that removing a subscription will also unregister it if the
         SubscriptionBundle is registered."""
@@ -94,10 +95,12 @@
 
         self.assertEqual(unregister.call_count, 1)
         unregistered_obj = unregister.call_args[0][0]
-        self.assertTrue(unregistered_obj == id(mock_subscription) or
-                        unregistered_obj == mock_subscription)
+        self.assertTrue(
+            unregistered_obj == id(mock_subscription)
+            or unregistered_obj == mock_subscription
+        )
 
-    @patch('antlion.event.event_bus.register_subscription')
+    @patch("antlion.event.event_bus.register_subscription")
     def test_register_registers_all_subscriptions(self, register_subscription):
         """Tests register() registers all subscriptions within the bundle."""
         mock_subscription_list = [Mock(), Mock(), Mock()]
@@ -108,13 +111,12 @@
 
         package.register()
 
-        self.assertEqual(register_subscription.call_count,
-                         len(mock_subscription_list))
+        self.assertEqual(register_subscription.call_count, len(mock_subscription_list))
         args = {args[0] for args, _ in register_subscription.call_args_list}
         for subscription in mock_subscription_list:
             self.assertTrue(subscription in args or id(subscription) in args)
 
-    @patch('antlion.event.event_bus.unregister')
+    @patch("antlion.event.event_bus.unregister")
     def test_register_registers_all_subscriptions(self, unregister):
         """Tests register() registers all subscriptions within the bundle."""
         mock_subscription_list = [Mock(), Mock(), Mock()]
@@ -180,13 +182,12 @@
 class SubscribeStaticModuleLevelTest(TestCase):
     def test_create_from_static(self):
         """Tests create_from_static gets all StaticSubscriptionHandles."""
-        bundle = subscription_bundle.create_from_static(
-            sys.modules[self.__module__])
+        bundle = subscription_bundle.create_from_static(sys.modules[self.__module__])
 
         self.assertEqual(len(bundle.subscriptions), 1)
         keys = bundle.subscriptions.keys()
         self.assertIn(static_listener_1.subscription, keys)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/logging/log_stream_test.py b/src/antlion/unit_tests/libs/logging/log_stream_test.py
index 8e1fc78..2dc8790 100755
--- a/src/antlion/unit_tests/libs/logging/log_stream_test.py
+++ b/src/antlion/unit_tests/libs/logging/log_stream_test.py
@@ -42,39 +42,40 @@
 
     @staticmethod
     def patch(imported_name, *args, **kwargs):
-        return mock.patch('antlion.libs.logging.log_stream.%s' % imported_name,
-                          *args, **kwargs)
+        return mock.patch(
+            "antlion.libs.logging.log_stream.%s" % imported_name, *args, **kwargs
+        )
 
     @classmethod
     def setUpClass(cls):
         # logging.log_path only exists if logger._setup_test_logger is called.
         # Here we set it to a value that is likely to not exist so file IO is
         # not executed (an error is raised instead of creating the file).
-        logging.log_path = '/f/a/i/l/p/a/t/h'
+        logging.log_path = "/f/a/i/l/p/a/t/h"
 
     def setUp(self):
         log_stream._log_streams = dict()
 
     # __init__
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_init_adds_null_handler(self, *_):
         """Tests that a NullHandler is added to the logger upon initialization.
         This ensures that no log output is generated when a test class is not
         running.
         """
         debug_monolith_log = LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG
-        with self.patch('MovableFileHandler'):
-            log = log_stream.create_logger(self._testMethodName,
-                                           log_styles=debug_monolith_log)
+        with self.patch("MovableFileHandler"):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=debug_monolith_log
+            )
 
         self.assertTrue(isinstance(log.handlers[0], logging.NullHandler))
 
     # __validate_style
 
-    @mock.patch('os.makedirs')
-    def test_validate_styles_raises_when_same_location_set_multiple_times(
-            self, *_):
+    @mock.patch("os.makedirs")
+    def test_validate_styles_raises_when_same_location_set_multiple_times(self, *_):
         """Tests that a style is invalid if it sets the same handler twice.
 
         If the error is NOT raised, then a LogStream can create a Logger that
@@ -83,13 +84,17 @@
         with self.assertRaises(InvalidStyleSetError) as catch:
             log_stream.create_logger(
                 self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                ],
+            )
         self.assertTrue(
-            'has been set multiple' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "has been set multiple" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_validate_styles_raises_when_multiple_file_outputs_set(self, *_):
         """Tests that a style is invalid if more than one of MONOLITH_LOG,
         TESTCLASS_LOG, and TESTCASE_LOG is set for the same log level.
@@ -100,32 +105,44 @@
         with self.assertRaises(InvalidStyleSetError) as catch:
             log_stream.create_logger(
                 self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG])
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
+                ],
+            )
         self.assertTrue(
-            'More than one of' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "More than one of" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
         with self.assertRaises(InvalidStyleSetError) as catch:
             log_stream.create_logger(
                 self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                ],
+            )
         self.assertTrue(
-            'More than one of' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "More than one of" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
         with self.assertRaises(InvalidStyleSetError) as catch:
             log_stream.create_logger(
                 self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                ],
+            )
         self.assertTrue(
-            'More than one of' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "More than one of" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_validate_styles_raises_when_no_level_exists(self, *_):
         """Tests that a style is invalid if it does not contain a log level.
 
@@ -133,14 +150,16 @@
         pass the information coming from the logger to the correct file.
         """
         with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(self._testMethodName,
-                                     log_styles=[LogStyles.MONOLITH_LOG])
+            log_stream.create_logger(
+                self._testMethodName, log_styles=[LogStyles.MONOLITH_LOG]
+            )
 
         self.assertTrue(
-            'log level' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "log level" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_validate_styles_raises_when_no_location_exists(self, *_):
         """Tests that a style is invalid if it does not contain a log level.
 
@@ -148,14 +167,16 @@
         pass the information coming from the logger to the correct file.
         """
         with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(self._testMethodName,
-                                     log_styles=[LogStyles.LOG_INFO])
+            log_stream.create_logger(
+                self._testMethodName, log_styles=[LogStyles.LOG_INFO]
+            )
 
         self.assertTrue(
-            'log location' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "log location" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_validate_styles_raises_when_rotate_logs_no_file_handler(self, *_):
         """Tests that a LogStyle cannot set ROTATE_LOGS without *_LOG flag.
 
@@ -169,69 +190,70 @@
                 self._testMethodName,
                 # Added LOG_DEBUG here to prevent the no_level_exists raise from
                 # occurring.
-                log_styles=[LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS])
+                log_styles=[LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS],
+            )
 
         self.assertTrue(
-            'log type' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
+            "log type" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
 
     # __handle_style
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_handle_style_to_acts_log_creates_handler(self, *_):
         """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler."""
         info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
 
-        log = log_stream.create_logger(self._testMethodName,
-                                       log_styles=info_acts_log)
+        log = log_stream.create_logger(self._testMethodName, log_styles=info_acts_log)
 
         self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_handle_style_to_acts_log_creates_handler_is_lowest_level(self, *_):
         """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler
         that is set to the lowest LogStyles level."""
-        info_acts_log = (LogStyles.LOG_DEBUG + LogStyles.LOG_INFO +
-                         LogStyles.TO_ACTS_LOG)
+        info_acts_log = LogStyles.LOG_DEBUG + LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
 
-        log = log_stream.create_logger(self._testMethodName,
-                                       log_styles=info_acts_log)
+        log = log_stream.create_logger(self._testMethodName, log_styles=info_acts_log)
 
         self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
         self.assertEqual(log.handlers[1].level, logging.DEBUG)
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_handle_style_to_stdout_creates_stream_handler(self, *_):
         """Tests that using the flag TO_STDOUT creates a StreamHandler."""
         info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_STDOUT
 
-        log = log_stream.create_logger(self._testMethodName,
-                                       log_styles=info_acts_log)
+        log = log_stream.create_logger(self._testMethodName, log_styles=info_acts_log)
 
         self.assertTrue(isinstance(log.handlers[1], logging.StreamHandler))
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_handle_style_creates_file_handler(self, *_):
         """Tests handle_style creates a MovableFileHandler for the MONOLITH_LOG."""
         info_acts_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
 
         expected = mock.MagicMock()
-        with self.patch('MovableFileHandler', return_value=expected):
-            log = log_stream.create_logger(self._testMethodName,
-                                           log_styles=info_acts_log)
+        with self.patch("MovableFileHandler", return_value=expected):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=info_acts_log
+            )
 
         self.assertEqual(log.handlers[1], expected)
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_handle_style_creates_rotating_file_handler(self, *_):
         """Tests handle_style creates a MovableFileHandler for the ROTATE_LOGS."""
-        info_acts_log = (LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS +
-                         LogStyles.MONOLITH_LOG)
+        info_acts_log = (
+            LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS + LogStyles.MONOLITH_LOG
+        )
 
         expected = mock.MagicMock()
-        with self.patch('MovableRotatingFileHandler', return_value=expected):
-            log = log_stream.create_logger(self._testMethodName,
-                                           log_styles=info_acts_log)
+        with self.patch("MovableRotatingFileHandler", return_value=expected):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=info_acts_log
+            )
 
         self.assertEqual(log.handlers[1], expected)
 
@@ -241,13 +263,14 @@
         """Tests that __create_rotating_file_handler does exactly that."""
         expected = mock.MagicMock()
 
-        with self.patch('MovableRotatingFileHandler', return_value=expected):
+        with self.patch("MovableRotatingFileHandler", return_value=expected):
             # Through name-mangling, this function is automatically renamed. See
             # https://docs.python.org/3/tutorial/classes.html#private-variables
-            fh = _LogStream._LogStream__create_rotating_file_handler('')
+            fh = _LogStream._LogStream__create_rotating_file_handler("")
 
-        self.assertEqual(expected, fh,
-                         'The function did not return a MovableRotatingFileHandler.')
+        self.assertEqual(
+            expected, fh, "The function did not return a MovableRotatingFileHandler."
+        )
 
     # __get_file_handler_creator
 
@@ -256,26 +279,30 @@
         has LogStyle.ROTATE_LOGS."""
         expected = mock.MagicMock()
 
-        with self.patch('_LogStream._LogStream__create_rotating_file_handler',
-                        return_value=expected):
+        with self.patch(
+            "_LogStream._LogStream__create_rotating_file_handler", return_value=expected
+        ):
             # Through name-mangling, this function is automatically renamed. See
             # https://docs.python.org/3/tutorial/classes.html#private-variables
             fh_creator = _LogStream._LogStream__get_file_handler_creator(
-                LogStyles.ROTATE_LOGS)
+                LogStyles.ROTATE_LOGS
+            )
 
-        self.assertEqual(expected, fh_creator('/d/u/m/m/y/p/a/t/h'),
-                         'The function did not return a MovableRotatingFileHandler.')
+        self.assertEqual(
+            expected,
+            fh_creator("/d/u/m/m/y/p/a/t/h"),
+            "The function did not return a MovableRotatingFileHandler.",
+        )
 
     def test_get_file_handler_creator_returns_file_handler(self):
         """Tests the function returns a MovableFileHandler when the log_style does NOT
         have LogStyle.ROTATE_LOGS."""
         expected = mock.MagicMock()
 
-        with self.patch('MovableFileHandler', return_value=expected):
+        with self.patch("MovableFileHandler", return_value=expected):
             # Through name-mangling, this function is automatically renamed. See
             # https://docs.python.org/3/tutorial/classes.html#private-variables
-            handler = _LogStream._LogStream__get_file_handler_creator(
-                LogStyles.NONE)()
+            handler = _LogStream._LogStream__get_file_handler_creator(LogStyles.NONE)()
 
         self.assertTrue(isinstance(handler, mock.Mock))
 
@@ -283,49 +310,50 @@
 
     def test_get_lowest_level_gets_lowest_level(self):
         """Tests __get_lowest_level returns the lowest LogStyle level given."""
-        level = _LogStream._LogStream__get_lowest_log_level(
-            LogStyles.ALL_LEVELS)
+        level = _LogStream._LogStream__get_lowest_log_level(LogStyles.ALL_LEVELS)
         self.assertEqual(level, LogStyles.LOG_DEBUG)
 
     # __get_current_output_dir
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_get_current_output_dir_gets_correct_path(self, *_):
-        """Tests __get_current_output_dir gets the correct path from the context
-        """
+        """Tests __get_current_output_dir gets the correct path from the context"""
         info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
 
         base_path = "BASEPATH"
         subcontext = "SUBCONTEXT"
-        with self.patch('MovableFileHandler'):
+        with self.patch("MovableFileHandler"):
             logstream = log_stream._LogStream(
-                self._testMethodName, log_styles=info_monolith_log,
-                base_path=base_path, subcontext=subcontext)
+                self._testMethodName,
+                log_styles=info_monolith_log,
+                base_path=base_path,
+                subcontext=subcontext,
+            )
 
         expected = os.path.join(base_path, subcontext)
-        self.assertEqual(
-            logstream._LogStream__get_current_output_dir(), expected)
+        self.assertEqual(logstream._LogStream__get_current_output_dir(), expected)
 
     # __create_handler
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_create_handler_creates_handler_at_correct_path(self, *_):
         """Tests that __create_handler calls the handler creator with the
         correct absolute path to the log file.
         """
         info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-        base_path = 'BASEPATH'
-        with self.patch('MovableFileHandler') as file_handler:
+        base_path = "BASEPATH"
+        with self.patch("MovableFileHandler") as file_handler:
             log_stream.create_logger(
-                self._testMethodName, log_styles=info_monolith_log,
-                base_path=base_path)
+                self._testMethodName, log_styles=info_monolith_log, base_path=base_path
+            )
             expected = os.path.join(
-                base_path, '%s_%s.txt' % (self._testMethodName, 'info'))
+                base_path, "%s_%s.txt" % (self._testMethodName, "info")
+            )
             file_handler.assert_called_with(expected)
 
     # __remove_handler
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_remove_handler_removes_a_handler(self, *_):
         """Tests that __remove_handler removes the handler from the logger and
         closes the handler.
@@ -340,38 +368,36 @@
 
     # update_handlers
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_update_handlers_updates_filehandler_target(self, _):
         """Tests that update_handlers invokes the underlying
         MovableFileHandler.set_file method on the correct path.
         """
         info_testclass_log = LogStyles.LOG_INFO + LogStyles.TESTCLASS_LOG
-        file_name = 'FILENAME'
-        with self.patch('MovableFileHandler'):
+        file_name = "FILENAME"
+        with self.patch("MovableFileHandler"):
             log = log_stream.create_logger(
-                self._testMethodName, log_styles=info_testclass_log)
+                self._testMethodName, log_styles=info_testclass_log
+            )
             handler = log.handlers[-1]
             handler.baseFilename = file_name
             stream = log_stream._log_streams[log.name]
-            stream._LogStream__get_current_output_dir = (
-                lambda: 'BASEPATH/TestClass'
-            )
+            stream._LogStream__get_current_output_dir = lambda: "BASEPATH/TestClass"
 
             stream.update_handlers(context.NewTestClassContextEvent())
 
-            handler.set_file.assert_called_with('BASEPATH/TestClass/FILENAME')
+            handler.set_file.assert_called_with("BASEPATH/TestClass/FILENAME")
 
     # cleanup
 
-    @mock.patch('os.makedirs')
+    @mock.patch("os.makedirs")
     def test_cleanup_removes_all_handlers(self, *_):
-        """ Tests that cleanup removes all handlers in the logger, except
+        """Tests that cleanup removes all handlers in the logger, except
         the NullHandler.
         """
         info_testcase_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-        with self.patch('MovableFileHandler'):
-            log_stream.create_logger(self._testMethodName,
-                                     log_styles=info_testcase_log)
+        with self.patch("MovableFileHandler"):
+            log_stream.create_logger(self._testMethodName, log_styles=info_testcase_log)
 
         created_log_stream = log_stream._log_streams[self._testMethodName]
         created_log_stream.cleanup()
@@ -385,7 +411,7 @@
         # logging.log_path only exists if logger._setup_test_logger is called.
         # Here we set it to a value that is likely to not exist so file IO is
         # not executed (an error is raised instead of creating the file).
-        logging.log_path = '/f/a/i/l/p/a/t/h'
+        logging.log_path = "/f/a/i/l/p/a/t/h"
 
     def setUp(self):
         log_stream._log_streams = {}
@@ -397,17 +423,13 @@
         return context.NewContextEvent()
 
     def test_update_handlers_delegates_calls_to_log_streams(self):
-        """Tests _update_handlers calls update_handlers on each log_stream.
-        """
-        log_stream._log_streams = {
-            'a': mock.Mock(),
-            'b': mock.Mock()
-        }
+        """Tests _update_handlers calls update_handlers on each log_stream."""
+        log_stream._log_streams = {"a": mock.Mock(), "b": mock.Mock()}
 
         log_stream._update_handlers(self.create_new_context_event())
 
-        self.assertTrue(log_stream._log_streams['a'].update_handlers.called)
-        self.assertTrue(log_stream._log_streams['b'].update_handlers.called)
+        self.assertTrue(log_stream._log_streams["a"].update_handlers.called)
+        self.assertTrue(log_stream._log_streams["b"].update_handlers.called)
 
     # _set_logger
 
@@ -416,15 +438,13 @@
         log_stream._log_streams.
         """
         previous = mock.Mock()
-        log_stream._log_streams = {
-            'a': previous
-        }
+        log_stream._log_streams = {"a": previous}
         expected = mock.Mock()
-        expected.name = 'a'
+        expected.name = "a"
         log_stream._set_logger(expected)
 
-        self.assertEqual(log_stream._log_streams['a'], expected)
+        self.assertEqual(log_stream._log_streams["a"], expected)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py b/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
index 3b82d23..633f577 100644
--- a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
@@ -30,98 +30,97 @@
 
     def setUp(self):
         self.device = mock.MagicMock()
-        self.device.serial = 'fake_serial'
+        self.device.serial = "fake_serial"
 
     def test_get_ota_value_from_config_no_map_key_missing(self):
         acts_config = {}
         with self.assertRaises(config_parser.ActsConfigError):
             ota_runner_factory.get_ota_value_from_config(
-                acts_config, 'ota_tool', self.device)
+                acts_config, "ota_tool", self.device
+            )
 
     def test_get_ota_value_from_config_with_map_key_missing(self):
-        acts_config = {'ota_map': {'fake_serial': 'MockOtaTool'}}
+        acts_config = {"ota_map": {"fake_serial": "MockOtaTool"}}
         with self.assertRaises(config_parser.ActsConfigError):
             ota_runner_factory.get_ota_value_from_config(
-                acts_config, 'ota_tool', self.device)
+                acts_config, "ota_tool", self.device
+            )
 
     def test_get_ota_value_from_config_with_map_key_found(self):
-        expected_value = '/path/to/tool'
+        expected_value = "/path/to/tool"
         acts_config = {
-            'ota_map': {
-                'fake_serial': 'MockOtaTool'
-            },
-            'ota_tool_MockOtaTool': expected_value
+            "ota_map": {"fake_serial": "MockOtaTool"},
+            "ota_tool_MockOtaTool": expected_value,
         }
         ret = ota_runner_factory.get_ota_value_from_config(
-            acts_config, 'ota_tool', self.device)
+            acts_config, "ota_tool", self.device
+        )
         self.assertEqual(expected_value, ret)
 
-    def test_create_from_configs_raise_when_non_default_tool_path_missing(
-            self):
+    def test_create_from_configs_raise_when_non_default_tool_path_missing(self):
         acts_config = {
-            'ota_tool': 'FakeTool',
+            "ota_tool": "FakeTool",
         }
         try:
             ota_runner_factory.create_from_configs(acts_config, self.device)
         except config_parser.ActsConfigError:
             return
-        self.fail('create_from_configs did not throw an error when a tool was'
-                  'specified without a tool path.')
+        self.fail(
+            "create_from_configs did not throw an error when a tool was"
+            "specified without a tool path."
+        )
 
     def test_create_from_configs_without_map_makes_proper_calls(self):
         acts_config = {
-            'ota_package': 'jkl;',
-            'ota_sl4a': 'qaz',
-            'ota_tool': 'FakeTool',
-            'FakeTool': 'qwerty'
+            "ota_package": "jkl;",
+            "ota_sl4a": "qaz",
+            "ota_tool": "FakeTool",
+            "FakeTool": "qwerty",
         }
-        function_path = 'antlion.libs.ota.ota_runners.ota_runner_factory.create'
+        function_path = "antlion.libs.ota.ota_runners.ota_runner_factory.create"
         with mock.patch(function_path) as mocked_function:
             ota_runner_factory.create_from_configs(acts_config, self.device)
-            mocked_function.assert_called_with('jkl;', 'qaz', self.device,
-                                               'FakeTool', 'qwerty')
+            mocked_function.assert_called_with(
+                "jkl;", "qaz", self.device, "FakeTool", "qwerty"
+            )
 
     def test_create_from_configs_with_map_makes_proper_calls(self):
         acts_config = {
-            'ota_map': {
-                'fake_serial': "hardwareA"
-            },
-            'ota_package_hardwareA': 'jkl;',
-            'ota_sl4a_hardwareA': 'qaz',
-            'ota_tool_hardwareA': 'FakeTool',
-            'FakeTool': 'qwerty'
+            "ota_map": {"fake_serial": "hardwareA"},
+            "ota_package_hardwareA": "jkl;",
+            "ota_sl4a_hardwareA": "qaz",
+            "ota_tool_hardwareA": "FakeTool",
+            "FakeTool": "qwerty",
         }
-        function_path = 'antlion.libs.ota.ota_runners.ota_runner_factory.create'
+        function_path = "antlion.libs.ota.ota_runners.ota_runner_factory.create"
         with mock.patch(function_path) as mocked_function:
             ota_runner_factory.create_from_configs(acts_config, self.device)
-            mocked_function.assert_called_with('jkl;', 'qaz', self.device,
-                                               'FakeTool', 'qwerty')
+            mocked_function.assert_called_with(
+                "jkl;", "qaz", self.device, "FakeTool", "qwerty"
+            )
 
-    def test_create_raise_on_ota_pkg_and_sl4a_fields_have_different_types(
-            self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
+    def test_create_raise_on_ota_pkg_and_sl4a_fields_have_different_types(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
             with self.assertRaises(TypeError):
-                ota_runner_factory.create('ota_package', ['ota_sl4a'],
-                                          self.device)
+                ota_runner_factory.create("ota_package", ["ota_sl4a"], self.device)
 
     def test_create_raise_on_ota_package_not_a_list_or_string(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
             with self.assertRaises(TypeError):
-                ota_runner_factory.create({'ota': 'pkg'}, {'ota': 'sl4a'},
-                                          self.device)
+                ota_runner_factory.create({"ota": "pkg"}, {"ota": "sl4a"}, self.device)
 
     def test_create_returns_single_ota_runner_on_ota_package_being_a_str(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            ret = ota_runner_factory.create('', '', self.device)
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            ret = ota_runner_factory.create("", "", self.device)
             self.assertEqual(type(ret), ota_runner.SingleUseOtaRunner)
 
     def test_create_returns_multi_ota_runner_on_ota_package_being_a_list(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
             ret = ota_runner_factory.create([], [], self.device)
             self.assertEqual(type(ret), ota_runner.MultiUseOtaRunner)
 
     def test_create_returns_bound_ota_runner_on_second_request(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
             first_return = ota_runner_factory.create([], [], self.device)
             logging.disable(logging.WARNING)
             second_return = ota_runner_factory.create([], [], self.device)
@@ -129,15 +128,15 @@
             self.assertEqual(first_return, second_return)
 
     def test_create_returns_different_ota_runner_on_second_request(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            first_return = ota_runner_factory.create([], [],
-                                                     self.device,
-                                                     use_cached_runners=False)
-            second_return = ota_runner_factory.create([], [],
-                                                      self.device,
-                                                      use_cached_runners=False)
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            first_return = ota_runner_factory.create(
+                [], [], self.device, use_cached_runners=False
+            )
+            second_return = ota_runner_factory.create(
+                [], [], self.device, use_cached_runners=False
+            )
             self.assertNotEqual(first_return, second_return)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py b/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
index 9f51368..114ec9b 100644
--- a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
@@ -48,10 +48,10 @@
     """Sets properties to return an empty string to allow OtaRunner tests."""
 
     def get_sl4a_apk(self):
-        return ''
+        return ""
 
     def get_ota_package(self):
-        return ''
+        return ""
 
     def validate_update(self):
         pass
@@ -70,10 +70,10 @@
     def test_update(self):
         device = mock.MagicMock()
         device.skip_sl4a = False
-        tool = MockOtaTool('mock_command')
+        tool = MockOtaTool("mock_command")
         runner = OtaRunnerImpl(tool, device)
-        runner.android_device.adb.getprop = mock.Mock(side_effect=['a', 'b'])
-        runner.get_post_build_id = lambda: 'abc'
+        runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "b"])
+        runner.get_post_build_id = lambda: "abc"
 
         runner._update()
 
@@ -85,19 +85,19 @@
 
     def test_update_fail_on_no_change_to_build(self):
         device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
+        tool = MockOtaTool("mock_command")
         runner = OtaRunnerImpl(tool, device)
-        runner.android_device.adb.getprop = mock.Mock(side_effect=['a', 'a'])
-        runner.get_post_build_id = lambda: 'abc'
+        runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "a"])
+        runner.get_post_build_id = lambda: "abc"
         try:
             runner._update()
-            self.fail('Matching build fingerprints did not throw an error!')
+            self.fail("Matching build fingerprints did not throw an error!")
         except ota_runner.OtaError:
             pass
 
     def test_init(self):
         device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
+        tool = MockOtaTool("mock_command")
         runner = ota_runner.OtaRunner(tool, device)
 
         self.assertEqual(runner.ota_tool, tool)
@@ -106,23 +106,25 @@
 
     def test_get_post_build_id_grabs_valid_data(self):
         device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
+        tool = MockOtaTool("mock_command")
         runner = OtaRunnerImpl(tool, device)
         ota_package_path = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
+            "dummy_ota_package.zip",
+        )
         runner.get_ota_package = lambda: ota_package_path
-        self.assertEqual(runner.get_post_build_id(), 'post-build_information')
+        self.assertEqual(runner.get_post_build_id(), "post-build_information")
 
     def test_get_ota_package_metadata_value_does_not_exist(self):
         device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
+        tool = MockOtaTool("mock_command")
         runner = OtaRunnerImpl(tool, device)
         ota_package_path = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
+            "dummy_ota_package.zip",
+        )
         runner.get_ota_package = lambda: ota_package_path
-        self.assertEqual(runner.get_ota_package_metadata('garbage-data'), None)
+        self.assertEqual(runner.get_ota_package_metadata("garbage-data"), None)
 
 
 class SingleUseOtaRunnerTest(unittest.TestCase):
@@ -130,47 +132,47 @@
 
     def setUp(self):
         self.device = mock.MagicMock()
-        self.tool = MockOtaTool('mock_command')
+        self.tool = MockOtaTool("mock_command")
 
     def test_update_first_update_runs(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
         try:
-            with mock.patch.object(ota_runner.OtaRunner, '_update'):
+            with mock.patch.object(ota_runner.OtaRunner, "_update"):
                 runner.update()
         except ota_runner.OtaError:
-            self.fail('SingleUseOtaRunner threw an exception on the first '
-                      'update call.')
+            self.fail(
+                "SingleUseOtaRunner threw an exception on the first " "update call."
+            )
 
     def test_update_second_update_raises_error(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
             runner.update()
             try:
                 runner.update()
             except ota_runner.OtaError:
                 return
-        self.fail('SingleUseOtaRunner did not throw an exception on the second'
-                  'update call.')
+        self.fail(
+            "SingleUseOtaRunner did not throw an exception on the second" "update call."
+        )
 
     def test_can_update_no_updates_called(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
         self.assertEqual(True, runner.can_update())
 
     def test_can_update_has_updated_already(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
             runner.update()
         self.assertEqual(False, runner.can_update())
 
     def test_get_ota_package(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, 'a',
-                                               'b')
-        self.assertEqual(runner.get_ota_package(), 'a')
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "a", "b")
+        self.assertEqual(runner.get_ota_package(), "a")
 
     def test_get_sl4a_apk(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, 'a',
-                                               'b')
-        self.assertEqual(runner.get_sl4a_apk(), 'b')
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "a", "b")
+        self.assertEqual(runner.get_sl4a_apk(), "b")
 
 
 class MultiUseOtaRunnerTest(unittest.TestCase):
@@ -178,79 +180,104 @@
 
     def setUp(self):
         self.device = mock.MagicMock()
-        self.tool = MockOtaTool('mock_command')
+        self.tool = MockOtaTool("mock_command")
 
     def test_update_first_update_runs(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device, [''],
-                                              [''])
+        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device, [""], [""])
         try:
-            with mock.patch.object(ota_runner.OtaRunner, '_update'):
+            with mock.patch.object(ota_runner.OtaRunner, "_update"):
                 runner.update()
         except ota_runner.OtaError:
-            self.fail('MultiUseOtaRunner threw an exception on the first '
-                      'update call.')
+            self.fail(
+                "MultiUseOtaRunner threw an exception on the first " "update call."
+            )
 
     def test_update_multiple_updates_run(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
             runner.update()
             try:
                 runner.update()
             except ota_runner.OtaError:
-                self.fail('MultiUseOtaRunner threw an exception before '
-                          'running out of update packages.')
+                self.fail(
+                    "MultiUseOtaRunner threw an exception before "
+                    "running out of update packages."
+                )
 
     def test_update_too_many_update_calls_raises_error(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
             runner.update()
             runner.update()
             try:
                 runner.update()
             except ota_runner.OtaError:
                 return
-        self.fail('MultiUseOtaRunner did not throw an exception after running '
-                  'out of update packages.')
+        self.fail(
+            "MultiUseOtaRunner did not throw an exception after running "
+            "out of update packages."
+        )
 
     def test_can_update_no_updates_called(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
         self.assertEqual(True, runner.can_update())
 
     def test_can_update_has_more_updates_left(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
             runner.update()
         self.assertEqual(True, runner.can_update())
 
     def test_can_update_ran_out_of_updates(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
             runner.update()
             runner.update()
         self.assertEqual(False, runner.can_update())
 
     def test_get_ota_package(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        self.assertEqual(runner.get_ota_package(), 'first_pkg')
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        self.assertEqual(runner.get_ota_package(), "first_pkg")
 
     def test_get_sl4a_apk(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        self.assertEqual(runner.get_sl4a_apk(), 'first_apk')
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        self.assertEqual(runner.get_sl4a_apk(), "first_apk")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
index 3816157..19e4abf 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
@@ -14,10 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import mock
-import unittest
-
 import os
+import unittest
 
 from antlion.controllers import android_device
 from antlion.libs.ota.ota_runners import ota_runner
@@ -25,14 +25,16 @@
 from antlion.libs.ota.ota_tools import adb_sideload_ota_tool
 
 
-def get_mock_android_device(serial='', ssh_connection=None):
+def get_mock_android_device(serial="", ssh_connection=None):
     """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
-    with mock.patch('antlion.controllers.adb.AdbProxy') as adb_proxy, (
-            mock.patch('antlion.controllers.fastboot.FastbootProxy')) as fb_proxy:
+    with mock.patch("antlion.controllers.adb.AdbProxy") as adb_proxy, mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy"
+    ) as fb_proxy:
+        adb_proxy.return_value.getprop.return_value = "1.2.3"
         fb_proxy.return_value.devices.return_value = ""
         ret = mock.Mock(
-            android_device.AndroidDevice(serial=serial,
-                                         ssh_connection=ssh_connection))
+            android_device.AndroidDevice(serial=serial, ssh_connection=ssh_connection)
+        )
         fb_proxy.reset_mock()
         return ret
 
@@ -41,13 +43,13 @@
     """Tests the OtaTool class."""
 
     def test_init(self):
-        expected_value = 'commmand string'
-        self.assertEqual(
-            ota_tool.OtaTool(expected_value).command, expected_value)
+        expected_value = "commmand string"
+        self.assertEqual(ota_tool.OtaTool(expected_value).command, expected_value)
 
     def setUp(self):
         self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
         ota_runner.SL4A_SERVICE_SETUP_TIME = 0
+        logging.log_path = "/tmp/log"
 
     def tearDown(self):
         ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
@@ -59,13 +61,13 @@
         device = get_mock_android_device()
         ota_package_path = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
+            "dummy_ota_package.zip",
+        )
         tool = adb_sideload_ota_tool.AdbSideloadOtaTool(ota_package_path)
-        runner = ota_runner.SingleUseOtaRunner(tool, device, ota_package_path,
-                                               '')
-        runner.android_device.adb.getprop = mock.Mock(side_effect=['a', 'b'])
+        runner = ota_runner.SingleUseOtaRunner(tool, device, ota_package_path, "")
+        runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "b"])
         runner.update()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
index 700e6b2..1bf516a 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
@@ -31,23 +31,23 @@
         ota_tool_factory._CONSTRUCTORS = {
             MockOtaTool.__name__: lambda command: MockOtaTool(command),
         }
-        ret = ota_tool_factory.create(MockOtaTool.__name__, 'command')
+        ret = ota_tool_factory.create(MockOtaTool.__name__, "command")
         self.assertEqual(type(ret), MockOtaTool)
         self.assertTrue(ret in ota_tool_factory._constructed_tools.values())
 
     def test_create_not_in_constructors(self):
         ota_tool_factory._CONSTRUCTORS = {}
         with self.assertRaises(KeyError):
-            ota_tool_factory.create(MockOtaTool.__name__, 'command')
+            ota_tool_factory.create(MockOtaTool.__name__, "command")
 
     def test_create_returns_cached_tool(self):
         ota_tool_factory._CONSTRUCTORS = {
             MockOtaTool.__name__: lambda command: MockOtaTool(command),
         }
-        ret_a = ota_tool_factory.create(MockOtaTool.__name__, 'command')
-        ret_b = ota_tool_factory.create(MockOtaTool.__name__, 'command')
+        ret_a = ota_tool_factory.create(MockOtaTool.__name__, "command")
+        ret_b = ota_tool_factory.create(MockOtaTool.__name__, "command")
         self.assertEqual(ret_a, ret_b)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
index 9fc8784..b9e236b 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
@@ -22,22 +22,21 @@
     """Tests the OtaTool class."""
 
     def test_init(self):
-        expected_value = 'commmand string'
-        self.assertEqual(
-            ota_tool.OtaTool(expected_value).command, expected_value)
+        expected_value = "commmand string"
+        self.assertEqual(ota_tool.OtaTool(expected_value).command, expected_value)
 
     def test_start_throws_error_on_unimplemented(self):
-        obj = 'some object'
+        obj = "some object"
         with self.assertRaises(NotImplementedError):
-            ota_tool.OtaTool('').update(obj)
+            ota_tool.OtaTool("").update(obj)
 
     def test_end_is_not_abstract(self):
-        obj = 'some object'
+        obj = "some object"
         try:
-            ota_tool.OtaTool('').cleanup(obj)
+            ota_tool.OtaTool("").cleanup(obj)
         except:
-            self.fail('End is not required and should be a virtual function.')
+            self.fail("End is not required and should be a virtual function.")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
index eddf02d..fc54452 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
@@ -14,24 +14,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import mock
-import unittest
-
 import os
+import unittest
 
 from antlion.controllers import android_device
 from antlion.libs.ota.ota_runners import ota_runner
 from antlion.libs.ota.ota_tools import update_device_ota_tool
 
 
-def get_mock_android_device(serial='', ssh_connection=None):
+def get_mock_android_device(serial="", ssh_connection=None):
     """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
-    with mock.patch('antlion.controllers.adb.AdbProxy') as adb_proxy, (
-            mock.patch('antlion.controllers.fastboot.FastbootProxy')) as fb_proxy:
+    with mock.patch("antlion.controllers.adb.AdbProxy") as adb_proxy, mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy"
+    ) as fb_proxy:
+        adb_proxy.return_value.getprop.return_value = "1.2.3"
         fb_proxy.return_value.devices.return_value = ""
         ret = mock.Mock(
-            android_device.AndroidDevice(serial=serial,
-                                         ssh_connection=ssh_connection))
+            android_device.AndroidDevice(serial=serial, ssh_connection=ssh_connection)
+        )
         fb_proxy.reset_mock()
         return ret
 
@@ -42,6 +44,7 @@
     def setUp(self):
         self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
         ota_runner.SL4A_SERVICE_SETUP_TIME = 0
+        logging.log_path = "/tmp/log"
 
     def tearDown(self):
         ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
@@ -49,35 +52,36 @@
     def test_update(self):
         ota_package_path = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        with mock.patch('tempfile.mkdtemp') as mkdtemp, (
-                mock.patch('shutil.rmtree')) as rmtree, (
-                    mock.patch('antlion.utils.unzip_maintain_permissions')):
-            mkdtemp.return_value = ''
-            rmtree.return_value = ''
+            "dummy_ota_package.zip",
+        )
+        with mock.patch("tempfile.mkdtemp") as mkdtemp, mock.patch(
+            "shutil.rmtree"
+        ) as rmtree, mock.patch("antlion.utils.unzip_maintain_permissions"):
+            mkdtemp.return_value = ""
+            rmtree.return_value = ""
             device = get_mock_android_device()
             tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
-            runner = mock.Mock(
-                ota_runner.SingleUseOtaRunner(tool, device, '', ''))
+            runner = mock.Mock(ota_runner.SingleUseOtaRunner(tool, device, "", ""))
             runner.return_value.android_device = device
-            with mock.patch('antlion.libs.proc.job.run'):
+            with mock.patch("antlion.libs.proc.job.run"):
                 tool.update(runner)
             del tool
 
     def test_del(self):
         ota_package_path = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        with mock.patch('tempfile.mkdtemp') as mkdtemp, (
-                mock.patch('shutil.rmtree')) as rmtree, (
-                    mock.patch('antlion.utils.unzip_maintain_permissions')):
-            mkdtemp.return_value = ''
-            rmtree.return_value = ''
+            "dummy_ota_package.zip",
+        )
+        with mock.patch("tempfile.mkdtemp") as mkdtemp, mock.patch(
+            "shutil.rmtree"
+        ) as rmtree, mock.patch("antlion.utils.unzip_maintain_permissions"):
+            mkdtemp.return_value = ""
+            rmtree.return_value = ""
             tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
             del tool
             self.assertTrue(mkdtemp.called)
             self.assertTrue(rmtree.called)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_updater_test.py b/src/antlion/unit_tests/libs/ota/ota_updater_test.py
index b6cad1a..ac92019 100644
--- a/src/antlion/unit_tests/libs/ota/ota_updater_test.py
+++ b/src/antlion/unit_tests/libs/ota/ota_updater_test.py
@@ -31,7 +31,7 @@
     def __init__(self):
         self.call_count = 0
         self.should_fail = False
-        self.can_update_value = 'CAN_UPDATE_CALLED'
+        self.can_update_value = "CAN_UPDATE_CALLED"
 
     def set_failure(self, should_fail=True):
         self.should_fail = should_fail
@@ -52,35 +52,34 @@
     """Tests the methods in the ota_updater module."""
 
     def test_initialize(self):
-        user_params = {'a': 1, 'b': 2, 'c': 3}
-        android_devices = ['x', 'y', 'z']
-        with mock.patch('antlion.libs.ota.ota_runners.ota_runner_factory.'
-                        'create_from_configs') as fn:
+        user_params = {"a": 1, "b": 2, "c": 3}
+        android_devices = ["x", "y", "z"]
+        with mock.patch(
+            "antlion.libs.ota.ota_runners.ota_runner_factory." "create_from_configs"
+        ) as fn:
             ota_updater.initialize(user_params, android_devices)
             for i in range(len(android_devices)):
                 fn.assert_any_call(user_params, android_devices[i])
             self.assertSetEqual(
-                set(android_devices), set(ota_updater.ota_runners.keys()))
+                set(android_devices), set(ota_updater.ota_runners.keys())
+            )
 
     def test_check_initialization_is_initialized(self):
-        device = MockAndroidDevice('serial')
-        ota_updater.ota_runners = {
-            device: ota_runner.OtaRunner('tool', device)
-        }
+        device = MockAndroidDevice("serial")
+        ota_updater.ota_runners = {device: ota_runner.OtaRunner("tool", device)}
         try:
             ota_updater._check_initialization(device)
         except ota_runner.OtaError:
-            self.fail(
-                '_check_initialization raised for initialized runner!')
+            self.fail("_check_initialization raised for initialized runner!")
 
     def test_check_initialization_is_not_initialized(self):
-        device = MockAndroidDevice('serial')
+        device = MockAndroidDevice("serial")
         ota_updater.ota_runners = {}
         with self.assertRaises(KeyError):
             ota_updater._check_initialization(device)
 
     def test_update_do_not_ignore_failures_and_failures_occur(self):
-        device = MockAndroidDevice('serial')
+        device = MockAndroidDevice("serial")
         runner = MockOtaRunner()
         runner.set_failure(True)
         ota_updater.ota_runners = {device: runner}
@@ -88,22 +87,21 @@
             ota_updater.update(device)
 
     def test_update_ignore_failures_and_failures_occur(self):
-        device = MockAndroidDevice('serial')
+        device = MockAndroidDevice("serial")
         runner = MockOtaRunner()
         runner.set_failure(True)
         ota_updater.ota_runners = {device: runner}
         try:
             ota_updater.update(device, ignore_update_errors=True)
         except ota_runner.OtaError:
-            self.fail('OtaError was raised when errors are to be ignored!')
+            self.fail("OtaError was raised when errors are to be ignored!")
 
     def test_can_update(self):
-        device = MockAndroidDevice('serial')
+        device = MockAndroidDevice("serial")
         runner = MockOtaRunner()
         ota_updater.ota_runners = {device: runner}
-        self.assertEqual(ota_updater.can_update(device),
-                         'CAN_UPDATE_CALLED')
+        self.assertEqual(ota_updater.can_update(device), "CAN_UPDATE_CALLED")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/proc/process_test.py b/src/antlion/unit_tests/libs/proc/process_test.py
index 0fd6985..e83ab46 100644
--- a/src/antlion/unit_tests/libs/proc/process_test.py
+++ b/src/antlion/unit_tests/libs/proc/process_test.py
@@ -52,8 +52,9 @@
 
     @staticmethod
     def patch(imported_name, *args, **kwargs):
-        return mock.patch('antlion.libs.proc.process.%s' % imported_name,
-                          *args, **kwargs)
+        return mock.patch(
+            "antlion.libs.proc.process.%s" % imported_name, *args, **kwargs
+        )
 
     # set_on_output_callback
 
@@ -61,7 +62,7 @@
         """Tests that set_on_output_callback sets on_output_callback."""
         callback = mock.Mock()
 
-        process = Process('cmd').set_on_output_callback(callback)
+        process = Process("cmd").set_on_output_callback(callback)
         process._on_output_callback()
 
         self.assertTrue(callback.called)
@@ -72,7 +73,7 @@
         """Tests that set_on_terminate_callback sets _on_terminate_callback."""
         callback = mock.Mock()
 
-        process = Process('cmd').set_on_terminate_callback(callback)
+        process = Process("cmd").set_on_terminate_callback(callback)
         process._on_terminate_callback()
 
         self.assertTrue(callback.called)
@@ -85,29 +86,29 @@
 
         This is required to prevent references to processes and threads from
         being overwritten, potentially causing ACTS to hang."""
-        process = Process('cmd')
+        process = Process("cmd")
 
         # Here we need the thread to start the process object.
         class FakeThreadImpl(FakeThread):
             def _on_start(self):
                 process._process = mock.Mock()
 
-        with self.patch('Thread', FakeThreadImpl):
+        with self.patch("Thread", FakeThreadImpl):
             process.start()
-            expected_msg = 'Process has already started.'
+            expected_msg = "Process has already started."
             with self.assertRaisesRegex(ProcessError, expected_msg):
                 process.start()
 
     def test_start_starts_listening_thread(self):
         """Tests that start starts the _exec_popen_loop function."""
-        process = Process('cmd')
+        process = Process("cmd")
 
         # Here we need the thread to start the process object.
         class FakeThreadImpl(FakeThread):
             def _on_start(self):
                 process._process = mock.Mock()
 
-        with self.patch('Thread', FakeThreadImpl):
+        with self.patch("Thread", FakeThreadImpl):
             process.start()
 
         self.assertTrue(process._listening_thread.alive)
@@ -118,31 +119,31 @@
     def test_wait_raises_if_called_back_to_back(self):
         """Tests that wait raises an exception if it has already been called
         prior."""
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
 
         process.wait(0)
-        expected_msg = 'Process is already being stopped.'
+        expected_msg = "Process is already being stopped."
         with self.assertRaisesRegex(ProcessError, expected_msg):
             process.wait(0)
 
-    @mock.patch.object(Process, '_kill_process')
+    @mock.patch.object(Process, "_kill_process")
     def test_wait_kills_after_timeout(self, *_):
         """Tests that if a TimeoutExpired error is thrown during wait, the
         process is killed."""
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
-        process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
+        process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
 
         process.wait(0)
 
         self.assertEqual(process._kill_process.called, True)
 
-    @mock.patch('os.getpgid', side_effect=lambda id: id)
-    @mock.patch('os.killpg')
+    @mock.patch("os.getpgid", side_effect=lambda id: id)
+    @mock.patch("os.killpg")
     def test_sends_signal(self, mock_os, *_):
         """Tests that signal is sent to process.."""
-        process = Process('cmd')
+        process = Process("cmd")
         mock_process = mock.Mock()
         mock_process.pid = -1
         process._process = mock_process
@@ -154,16 +155,16 @@
     def test_signal_raises_error_on_windows(self, *_):
         """Tests that signaling is unsupported in windows with appropriate
         error msg."""
-        process = Process('cmd')
+        process = Process("cmd")
         mock_inner_process = mock.Mock()
         mock_inner_process.pid = -1
         process._process = mock_inner_process
 
-        with mock.patch('antlion.libs.proc.process._on_windows', True):
+        with mock.patch("antlion.libs.proc.process._on_windows", True):
             with self.assertRaises(ProcessError):
                 process.signal(51641)
 
-    @mock.patch.object(Process, '_kill_process')
+    @mock.patch.object(Process, "_kill_process")
     def test_wait_sets_stopped_to_true_before_process_kill(self, *_):
         """Tests that stop() sets the _stopped attribute to True.
 
@@ -177,10 +178,10 @@
             self.assertTrue(process._stopped)
             verifier.passed = True
 
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         process._process.poll.return_value = None
-        process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
+        process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
         process._kill_process = test_call_order
 
         process.wait()
@@ -189,7 +190,7 @@
 
     def test_wait_joins_listening_thread_if_it_exists(self):
         """Tests wait() joins _listening_thread if it exists."""
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         mocked_thread = mock.Mock()
         process._listening_thread = mocked_thread
@@ -204,7 +205,7 @@
         Threads can only be started once, so after wait has been called, we
         want to make sure we clear the listening thread.
         """
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         process._listening_thread = mock.Mock()
 
@@ -214,7 +215,7 @@
 
     def test_wait_joins_redirection_thread_if_it_exists(self):
         """Tests wait() joins _listening_thread if it exists."""
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         mocked_thread = mock.Mock()
         process._redirection_thread = mocked_thread
@@ -229,7 +230,7 @@
         Threads can only be started once, so after wait has been called, we
         want to make sure we clear the listening thread.
         """
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         process._redirection_thread = mock.Mock()
 
@@ -241,7 +242,7 @@
 
     def test_stop_sets_stopped_to_true(self):
         """Tests that stop() sets the _stopped attribute to True."""
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
 
         process.stop()
@@ -261,11 +262,11 @@
             self.assertTrue(process._stopped)
             verifier.passed = True
 
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         process._process.poll.return_value = None
         process._kill_process = test_call_order
-        process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
+        process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
 
         process.stop()
 
@@ -273,7 +274,7 @@
 
     def test_stop_calls_wait(self):
         """Tests that stop() also has the functionality of wait()."""
-        process = Process('cmd')
+        process = Process("cmd")
         process._process = mock.Mock()
         process.wait = mock.Mock()
 
@@ -290,79 +291,78 @@
         def appender(line):
             received_list.append(line)
 
-        process = Process('cmd')
+        process = Process("cmd")
         process.set_on_output_callback(appender)
         process._process = mock.Mock()
-        process._process.stdout.readline.side_effect = [b'a\n', b'b\n', b'']
+        process._process.stdout.readline.side_effect = [b"a\n", b"b\n", b""]
 
         process._redirect_output()
 
-        self.assertEqual(received_list[0], 'a')
-        self.assertEqual(received_list[1], 'b')
+        self.assertEqual(received_list[0], "a")
+        self.assertEqual(received_list[1], "b")
         self.assertEqual(len(received_list), 2)
 
     # __start_process
 
     def test_start_process_returns_a_popen_object(self):
         """Tests that a Popen object is returned by __start_process."""
-        with self.patch('subprocess.Popen', return_value='verification'):
-            self.assertEqual(Process._Process__start_process('cmd'),
-                             'verification')
+        with self.patch("subprocess.Popen", return_value="verification"):
+            self.assertEqual(Process._Process__start_process("cmd"), "verification")
 
     # _exec_loop
 
     def test_exec_loop_redirections_output(self):
         """Tests that the _exec_loop function calls to redirect the output."""
-        process = Process('cmd')
+        process = Process("cmd")
         Process._Process__start_process = mock.Mock()
 
-        with self.patch('Thread', FakeThread):
+        with self.patch("Thread", FakeThread):
             process._exec_loop()
 
-        self.assertEqual(process._redirection_thread.target,
-                         process._redirect_output)
+        self.assertEqual(process._redirection_thread.target, process._redirect_output)
         self.assertEqual(process._redirection_thread.alive, True)
 
     def test_exec_loop_waits_for_process(self):
         """Tests that the _exec_loop waits for the process to complete before
         returning."""
-        process = Process('cmd')
+        process = Process("cmd")
         Process._Process__start_process = mock.Mock()
 
-        with self.patch('Thread', FakeThread):
+        with self.patch("Thread", FakeThread):
             process._exec_loop()
 
         self.assertEqual(process._process.wait.called, True)
 
     def test_exec_loop_loops_if_not_stopped(self):
-        process = Process('1st')
+        process = Process("1st")
         Process._Process__start_process = mock.Mock()
-        process._on_terminate_callback = mock.Mock(side_effect=[['2nd'], None])
+        process._on_terminate_callback = mock.Mock(side_effect=[["2nd"], None])
 
-        with self.patch('Thread', FakeThread):
+        with self.patch("Thread", FakeThread):
             process._exec_loop()
 
         self.assertEqual(Process._Process__start_process.call_count, 2)
-        self.assertEqual(Process._Process__start_process.call_args_list[0][0],
-                         (['1st'],))
-        self.assertEqual(Process._Process__start_process.call_args_list[1][0],
-                         (['2nd'],))
+        self.assertEqual(
+            Process._Process__start_process.call_args_list[0][0], (["1st"],)
+        )
+        self.assertEqual(
+            Process._Process__start_process.call_args_list[1][0], (["2nd"],)
+        )
 
     def test_exec_loop_does_not_loop_if_stopped(self):
-        process = Process('1st')
+        process = Process("1st")
         Process._Process__start_process = mock.Mock()
-        process._on_terminate_callback = mock.Mock(
-            side_effect=['2nd', None])
+        process._on_terminate_callback = mock.Mock(side_effect=["2nd", None])
         process._stopped = True
 
-        with self.patch('Thread', FakeThread):
+        with self.patch("Thread", FakeThread):
             process._exec_loop()
 
         self.assertEqual(Process._Process__start_process.call_count, 1)
         self.assertEqual(
-            Process._Process__start_process.call_args_list[0][0],
-            (['1st'],))
+            Process._Process__start_process.call_args_list[0][0], (["1st"],)
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/libs/test_bindings/__init__.py b/src/antlion/unit_tests/libs/test_bindings/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/libs/test_bindings/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/libs/test_bindings/all_tests_decorator_test.py b/src/antlion/unit_tests/libs/test_bindings/all_tests_decorator_test.py
deleted file mode 100644
index 87fb3e5..0000000
--- a/src/antlion/unit_tests/libs/test_bindings/all_tests_decorator_test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from antlion.libs.test_binding.all_tests_decorator import for_all_tests
-
-
-class AllTestDecoratorTest(unittest.TestCase):
-
-    def test_add_to_all_tests(self):
-
-        def decorator(decorated):
-            def inner(*args, **kwargs):
-                return 3
-
-            return inner
-
-        @for_all_tests(decorator)
-        class TestTest(object):
-            def test_a_thing(self):
-                return 4
-
-            def not_a_test(self):
-                return 4
-
-        test = TestTest()
-        self.assertEqual(test.test_a_thing(), 3)
-        self.assertEqual(test.not_a_test(), 4)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/test_bindings/binding_test.py b/src/antlion/unit_tests/libs/test_bindings/binding_test.py
deleted file mode 100644
index e76969b..0000000
--- a/src/antlion/unit_tests/libs/test_bindings/binding_test.py
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mock
-
-from antlion import signals
-
-from antlion.libs.test_binding.binding import Binding
-
-
-class BindingTest(unittest.TestCase):
-
-    def test_instance_binding(self):
-        instance = object()
-        binding = Binding(object)
-
-        instance_binding = binding.__get__(instance, None)
-
-        self.assertEqual(instance_binding.instance_args, [instance])
-
-    def test_call_inner(self):
-        self.inner_args = []
-        self.inner_kwargs = {}
-
-        def inner(*args, **kwargs):
-            self.inner_args = args
-            self.inner_kwargs = kwargs
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding('arg', v=2)
-        except signals.TestSignal:
-            pass
-
-        self.assertEqual(self.inner_args, ('test', 'arg'))
-        self.assertEqual(self.inner_kwargs, {'v': 2})
-
-    def test_call_inner_pass_on_none(self):
-
-        def inner(*args, **kwargs):
-            pass
-
-        binding = Binding(inner)
-
-        try:
-            binding()
-        except signals.TestPass:
-            pass
-
-    def test_call_inner_pass_on_true(self):
-
-        def inner(*args, **kwargs):
-            return True
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding()
-        except signals.TestPass:
-            pass
-
-    def test_call_inner_fail_on_false(self):
-
-        def inner(*_, **__):
-            return False
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding()
-        except signals.TestFailure:
-            pass
-
-    def test_call_inner_pass_through_signal(self):
-
-        def inner(*_, **__):
-            raise signals.TestPass('DETAILS')
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding()
-        except signals.TestPass as signal:
-            self.assertEqual(signal.details, 'DETAILS')
-
-    def test_arg_modifier(self):
-        self.inner_args = []
-        self.inner_kwargs = {}
-
-        def arg_modifier(_, *args, **kwargs):
-            new_args = list(args) + ['new arg']
-            new_kwargs = dict(kwargs, kw='value')
-
-            return new_args, new_kwargs
-
-        def inner(*args, **kwargs):
-            self.inner_args = args
-            self.inner_kwargs = kwargs
-
-        binding = Binding(inner, arg_modifier=arg_modifier)
-
-        try:
-            binding('arg', v=2)
-        except signals.TestSignal:
-            pass
-
-        self.assertEqual(self.inner_args, ('arg', 'new arg'))
-        self.assertEqual(self.inner_kwargs, {'v': 2, 'kw': 'value'})
-
-    def test_call_before(self):
-
-        self.has_called_before = False
-
-        def before(*_, **__):
-            self.has_called_before = True
-
-        def inner(*_, **__):
-            self.assertTrue(self.has_called_before)
-
-        binding = Binding(inner, before=before)
-
-        try:
-            binding()
-        except signals.TestSignal:
-            pass
-
-        self.assertTrue(self.has_called_before)
-
-    def test_call_after(self):
-
-        self.has_called_after = False
-
-        def after(*_, **__):
-            self.has_called_after = True
-
-        def inner(*_, **__):
-            self.assertFalse(self.has_called_after)
-
-        binding = Binding(inner, after=after)
-
-        try:
-            binding()
-        except signals.TestSignal:
-            pass
-
-        self.assertTrue(self.has_called_after)
-
-    def test_signal_modify(self):
-
-        def inner(*_, **__):
-            raise signals.TestPass('DETAILS')
-
-        def signal_modifier(_, signal, *__, **___):
-            raise signals.TestFailure(signal.details)
-
-        binding = Binding(inner, signal_modifier=signal_modifier)
-
-        try:
-            binding()
-        except signals.TestFailure as signal:
-            self.assertEqual(signal.details, 'DETAILS')
-
-    def test_inner_attr_proxy_test(self):
-        def some_func():
-            pass
-
-        inner = some_func
-        inner.x = 10
-
-        binding = Binding(inner)
-
-        self.assertEqual(binding.x, inner.x)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/uicd/__init__.py b/src/antlion/unit_tests/libs/uicd/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/libs/uicd/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/libs/uicd/uicd_cli_test.py b/src/antlion/unit_tests/libs/uicd/uicd_cli_test.py
deleted file mode 100644
index b2ac4c3..0000000
--- a/src/antlion/unit_tests/libs/uicd/uicd_cli_test.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import mock
-import unittest
-
-from antlion.libs.uicd.uicd_cli import UicdCli
-from antlion.libs.uicd.uicd_cli import UicdError
-
-_MOCK_WALK = {'/dir1': [('/dir1', (), ('file1', 'file2'))],
-              '/dir2': [('/dir2', ('dir3',), ('file3',)),
-                        ('/dir2/dir3', (), ())],
-              '/dir3': [('/dir3', (), ('file1',))]}
-
-
-def _mock_walk(path, **_):
-    return _MOCK_WALK.get(path, [])
-
-
-class UicdCliTest(unittest.TestCase):
-    """Tests the antlion.libs.uicd.uicd_cli.UicdCli class."""
-
-    # _set_workflows
-
-    @mock.patch('os.walk', _mock_walk)
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    def test_set_workflows_sets_correct_file_path(self, *_):
-        """Tests that the workflow name is mapped correctly to its path."""
-        nc = UicdCli('', '/dir1')
-        self.assertIn('file1', nc._workflows,
-                      'Workflow file not added to dictionary.')
-        self.assertEqual(nc._workflows['file1'], '/dir1/file1',
-                         'Workflow name does not point to the correct path.')
-
-    @mock.patch('os.walk', _mock_walk)
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    def test_set_workflows_adds_workflows_from_directories(self, *_):
-        """Tests that providing a directory name adds all files from that
-        directory. Also tests that no directories are added to the dictionary.
-        """
-        nc = UicdCli('', ['/dir1', '/dir2'])
-        for file_name in ['file1', 'file2', 'file3']:
-            self.assertIn(file_name, nc._workflows,
-                          'Workflow file not added to dictionary.')
-        for dir_name in ['dir1', 'dir2', 'dir3']:
-            self.assertNotIn(dir_name, nc._workflows,
-                             'Directories should not be added to dictionary.')
-
-    @mock.patch('os.walk', _mock_walk)
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    def test_set_workflows_rejects_duplicate_workflow_names(self, *_):
-        """Tests that _set_workflows raises an exception if two or more
-        workflows of the same name are provided.
-        """
-        expected_msg = 'Uicd workflows may not share the same name.'
-        with self.assertRaisesRegex(UicdError, expected_msg):
-            nc = UicdCli('', ['/dir1', '/dir3'])
-
-    # run
-
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp', return_value='/base')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    @mock.patch.object(UicdCli, '_set_workflows')
-    def test_run_generates_correct_uicd_cmds(self, *_):
-        """Tests that the correct cmds are generated upon calling run()."""
-        nc = UicdCli('', [])
-        nc._workflows = {'test': '/workflows/test'}
-        # No log path set
-        with mock.patch('antlion.libs.proc.job.run') as job_run:
-            nc.run('SERIAL', 'test')
-            expected_cmd = 'java -jar /base/uicd-commandline.jar ' \
-                           '-d SERIAL -i /workflows/test'
-            job_run.assert_called_with(expected_cmd.split(), timeout=120)
-        # Log path set
-        nc._log_path = '/logs'
-        with mock.patch('antlion.libs.proc.job.run') as job_run:
-            nc.run('SERIAL', 'test')
-            expected_cmd = 'java -jar /base/uicd-commandline.jar ' \
-                           '-d SERIAL -i /workflows/test -o /logs'
-            job_run.assert_called_with(expected_cmd.split(), timeout=120)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/version_selector_test.py b/src/antlion/unit_tests/libs/version_selector_test.py
deleted file mode 100755
index 7abfbe7..0000000
--- a/src/antlion/unit_tests/libs/version_selector_test.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import sys
-
-# A temporary hack to prevent tests/libs/logging from being selected as the
-# python default logging module.
-sys.path[0] = os.path.join(sys.path[0], '../')
-import mock
-import shutil
-import tempfile
-import unittest
-
-from antlion import base_test
-from antlion.libs import version_selector
-
-from mobly.config_parser import TestRunConfig
-
-
-def versioning_decorator(min_sdk, max_sdk):
-    return version_selector.set_version(lambda ret, *_, **__: ret, min_sdk,
-                                        max_sdk)
-
-
-def versioning_decorator2(min_sdk, max_sdk):
-    return version_selector.set_version(lambda ret, *_, **__: ret, min_sdk,
-                                        max_sdk)
-
-
-def test_versioning(min_sdk, max_sdk):
-    return version_selector.set_version(lambda *_, **__: 1, min_sdk, max_sdk)
-
-
-@versioning_decorator(1, 10)
-def versioned_func(arg1, arg2):
-    return 'function 1', arg1, arg2
-
-
-@versioning_decorator(11, 11)
-def versioned_func(arg1, arg2):
-    return 'function 2', arg1, arg2
-
-
-@versioning_decorator(12, 20)
-def versioned_func(arg1, arg2):
-    return 'function 3', arg1, arg2
-
-
-@versioning_decorator(1, 20)
-def versioned_func_with_kwargs(_, asdf='jkl'):
-    return asdf
-
-
-def class_versioning_decorator(min_sdk, max_sdk):
-    return version_selector.set_version(lambda _, ret, *__, **___: ret,
-                                        min_sdk, max_sdk)
-
-
-class VersionedClass(object):
-    @classmethod
-    @class_versioning_decorator(1, 99999999)
-    def class_func(cls, arg1):
-        return cls, arg1
-
-    @staticmethod
-    @versioning_decorator(1, 99999999)
-    def static_func(arg1):
-        return arg1
-
-    @class_versioning_decorator(1, 99999999)
-    def instance_func(self, arg1):
-        return self, arg1
-
-
-class VersionedTestClass(base_test.BaseTestClass):
-    @mock.patch('mobly.utils.create_dir')
-    def __init__(self, configs, _):
-        super().__init__(configs)
-
-    @test_versioning(1, 1)
-    def test_1(self):
-        pass
-
-    @test_versioning(1, 1)
-    def test_2(self):
-        pass
-
-
-class VersionSelectorIntegrationTest(unittest.TestCase):
-    """Tests the antlion.libs.version_selector module."""
-
-    @classmethod
-    def setUpClass(cls):
-        cls.tmp_dir = tempfile.mkdtemp()
-
-    @classmethod
-    def tearDownClass(cls):
-        shutil.rmtree(cls.tmp_dir)
-
-    def test_raises_syntax_error_if_decorated_with_staticmethod_first(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 1)
-                @staticmethod
-                def test_1():
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Placing the @staticmethod decorator after the '
-                      'versioning decorator should cause a SyntaxError.')
-
-    def test_raises_syntax_error_if_decorated_with_classmethod_first(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 1)
-                @classmethod
-                def test_1(cls):
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Placing the @classmethod decorator after the '
-                      'versioning decorator should cause a SyntaxError.')
-
-    def test_overriding_an_undecorated_func_raises_a_syntax_error(self):
-        try:
-
-            class SomeClass(object):
-                def test_1(self):
-                    pass
-
-                @versioning_decorator(1, 1)
-                def test_1(self):
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Overwriting a function that already exists without a '
-                      'versioning decorator should raise a SyntaxError.')
-
-    def test_func_decorated_with_2_different_versioning_decorators_causes_error(
-            self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 1)
-                def test_1(self):
-                    pass
-
-                @versioning_decorator2(2, 2)
-                def test_1(self):
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Using two different versioning decorators to version a '
-                      'single function should raise a SyntaxError.')
-
-    def test_func_decorated_with_overlapping_ranges_causes_value_error(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 2)
-                def test_1(self):
-                    pass
-
-                @versioning_decorator(2, 2)
-                def test_1(self):
-                    pass
-        except ValueError:
-            pass
-        else:
-            self.fail('Decorated functions with overlapping version ranges '
-                      'should raise a ValueError.')
-
-    def test_func_decorated_with_min_gt_max_causes_value_error(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(2, 1)
-                def test_1(self):
-                    pass
-        except ValueError:
-            pass
-        else:
-            self.fail(
-                'If the min_version level is higher than the max_version '
-                'level, a ValueError should be raised.')
-
-    def test_calling_versioned_func_on_min_version_level_is_inclusive(self):
-        """Tests that calling some versioned function with the minimum version
-        level of the decorated function will call that function."""
-        ret = versioned_func(1, 'some_value')
-        self.assertEqual(
-            ret, ('function 1', 1, 'some_value'),
-            'Calling versioned_func(1, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_calling_versioned_func_on_middle_level_works(self):
-        """Tests that calling some versioned function a version value within the
-        range of the decorated function will call that function."""
-        ret = versioned_func(16, 'some_value')
-        self.assertEqual(
-            ret, ('function 3', 16, 'some_value'),
-            'Calling versioned_func(16, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_calling_versioned_func_on_max_version_level_is_inclusive(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func(10, 'some_value')
-        self.assertEqual(
-            ret, ('function 1', 10, 'some_value'),
-            'Calling versioned_func(10, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_calling_versioned_func_on_min_equals_max_level_works(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func(11, 'some_value')
-        self.assertEqual(
-            ret, ('function 2', 11, 'some_value'),
-            'Calling versioned_func(10, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_sending_kwargs_through_decorated_functions_works(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func_with_kwargs(1, asdf='some_value')
-        self.assertEqual(
-            ret, 'some_value',
-            'Calling versioned_func_with_kwargs(1, ...) did not'
-            'return the kwarg value properly.')
-
-    def test_kwargs_can_default_through_decorated_functions(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func_with_kwargs(1)
-        self.assertEqual(
-            ret, 'jkl', 'Calling versioned_func_with_kwargs(1) did not'
-            'return the default kwarg value properly.')
-
-    def test_staticmethod_can_be_called_properly(self):
-        """Tests that decorating a staticmethod will properly send the arguments
-        in the correct order.
-
-        i.e., we want to make sure self or cls do not get sent as the first
-        argument to the decorated staticmethod.
-        """
-        versioned_class = VersionedClass()
-        ret = versioned_class.static_func(123456)
-        self.assertEqual(
-            ret, 123456, 'The first argument was not set properly for calling '
-            'a staticmethod.')
-
-    def test_instance_method_can_be_called_properly(self):
-        """Tests that decorating a method will properly send the arguments
-        in the correct order.
-
-        i.e., we want to make sure self is the first argument returned.
-        """
-        versioned_class = VersionedClass()
-        ret = versioned_class.instance_func(123456)
-        self.assertEqual(
-            ret, (versioned_class, 123456),
-            'The arguments were not set properly for an instance '
-            'method.')
-
-    def test_classmethod_can_be_called_properly(self):
-        """Tests that decorating a classmethod will properly send the arguments
-        in the correct order.
-
-        i.e., we want to make sure cls is the first argument returned.
-        """
-        versioned_class = VersionedClass()
-        ret = versioned_class.class_func(123456)
-        self.assertEqual(
-            ret, (VersionedClass, 123456),
-            'The arguments were not set properly for a '
-            'classmethod.')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/test_runner_test.py b/src/antlion/unit_tests/test_runner_test.py
index 032b36a..4bdc456 100755
--- a/src/antlion/unit_tests/test_runner_test.py
+++ b/src/antlion/unit_tests/test_runner_test.py
@@ -30,13 +30,10 @@
     def setUp(self):
         self.tmp_dir = tempfile.mkdtemp()
         self.base_mock_test_config = TestRunConfig()
-        self.base_mock_test_config.testbed_name = 'SampleTestBed'
+        self.base_mock_test_config.testbed_name = "SampleTestBed"
         self.base_mock_test_config.log_path = self.tmp_dir
-        self.base_mock_test_config.controller_configs = {'testpaths': ['./']}
-        self.base_mock_test_config.user_params = {
-            'icecream': 42,
-            'extra_param': 'haha'
-        }
+        self.base_mock_test_config.controller_configs = {"testpaths": ["./"]}
+        self.base_mock_test_config.user_params = {"icecream": 42, "extra_param": "haha"}
 
     def tearDown(self):
         shutil.rmtree(self.tmp_dir)
@@ -45,13 +42,12 @@
     def create_test_classes(class_names):
         return {class_name: Mock() for class_name in class_names}
 
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
+    @patch("antlion.records.TestResult")
+    @patch.object(test_runner.TestRunner, "_write_results_to_file")
     def test_class_name_pattern_single(self, *_):
-        class_names = ['test_class_1', 'test_class_2']
-        pattern = 'test*1'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
+        class_names = ["test_class_1", "test_class_2"]
+        pattern = "test*1"
+        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
 
         test_classes = self.create_test_classes(class_names)
         tr.import_test_modules = Mock(return_value=test_classes)
@@ -59,13 +55,12 @@
         self.assertTrue(test_classes[class_names[0]].called)
         self.assertFalse(test_classes[class_names[1]].called)
 
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
+    @patch("antlion.records.TestResult")
+    @patch.object(test_runner.TestRunner, "_write_results_to_file")
     def test_class_name_pattern_multi(self, *_):
-        class_names = ['test_class_1', 'test_class_2', 'other_name']
-        pattern = 'test_class*'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
+        class_names = ["test_class_1", "test_class_2", "other_name"]
+        pattern = "test_class*"
+        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
 
         test_classes = self.create_test_classes(class_names)
         tr.import_test_modules = Mock(return_value=test_classes)
@@ -74,13 +69,12 @@
         self.assertTrue(test_classes[class_names[1]].called)
         self.assertFalse(test_classes[class_names[2]].called)
 
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
+    @patch("antlion.records.TestResult")
+    @patch.object(test_runner.TestRunner, "_write_results_to_file")
     def test_class_name_pattern_question_mark(self, *_):
-        class_names = ['test_class_1', 'test_class_12']
-        pattern = 'test_class_?'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
+        class_names = ["test_class_1", "test_class_12"]
+        pattern = "test_class_?"
+        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
 
         test_classes = self.create_test_classes(class_names)
         tr.import_test_modules = Mock(return_value=test_classes)
@@ -88,13 +82,12 @@
         self.assertTrue(test_classes[class_names[0]].called)
         self.assertFalse(test_classes[class_names[1]].called)
 
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
+    @patch("antlion.records.TestResult")
+    @patch.object(test_runner.TestRunner, "_write_results_to_file")
     def test_class_name_pattern_char_seq(self, *_):
-        class_names = ['test_class_1', 'test_class_2', 'test_class_3']
-        pattern = 'test_class_[1357]'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
+        class_names = ["test_class_1", "test_class_2", "test_class_3"]
+        pattern = "test_class_[1357]"
+        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
 
         test_classes = self.create_test_classes(class_names)
         tr.import_test_modules = Mock(return_value=test_classes)
@@ -103,24 +96,27 @@
         self.assertFalse(test_classes[class_names[1]].called)
         self.assertTrue(test_classes[class_names[2]].called)
 
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, 'dump_config')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
-    @patch('antlion.test_runner.logger')
+    @patch("antlion.records.TestResult")
+    @patch.object(test_runner.TestRunner, "dump_config")
+    @patch.object(test_runner.TestRunner, "_write_results_to_file")
+    @patch("antlion.test_runner.logger")
     def test_class_logpath_contains_proper_directory(self, logger_mock, *_):
-        expected_timestamp = '1970-01-01_00-00-00-00-000000'
+        expected_timestamp = "1970-01-01_00-00-00-00-000000"
         logger_mock.get_log_file_timestamp.return_value = expected_timestamp
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [('MockTest', None)])
+        tr = test_runner.TestRunner(self.base_mock_test_config, [("MockTest", None)])
         mock_class = Mock()
-        tr.import_test_modules = Mock(return_value={'MockTest': mock_class})
+        tr.import_test_modules = Mock(return_value={"MockTest": mock_class})
         tr.run()
 
         self.assertEqual(
             mock_class.call_args_list[0][0][0].log_path,
-            os.path.join(self.tmp_dir, self.base_mock_test_config.testbed_name,
-                         expected_timestamp))
+            os.path.join(
+                self.tmp_dir,
+                self.base_mock_test_config.testbed_name,
+                expected_timestamp,
+            ),
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/src/antlion/unit_tests/test_suite.py b/src/antlion/unit_tests/test_suite.py
index 5d2d1ca..3194587 100755
--- a/src/antlion/unit_tests/test_suite.py
+++ b/src/antlion/unit_tests/test_suite.py
@@ -29,6 +29,7 @@
         test_suite: The unittest.TestSuite used. Useful for debugging.
         test_filename: The *_test.py file that ran in this test.
     """
+
     def __init__(self, test_result, output_file, test_suite, test_filename):
         self.test_result = test_result
         self.output_file = output_file
@@ -42,14 +43,14 @@
     loader = unittest.TestLoader()
     for root, _, files in os.walk(os.path.dirname(__file__)):
         for filename in files:
-            if filename.endswith('_test.py'):
+            if filename.endswith("_test.py"):
                 test_files.append(os.path.join(root, filename))
                 try:
                     suite.addTest(loader.discover(root, filename))
                 except ImportError as e:
-                    if 'Start directory is not importable' not in e.args[0]:
+                    if "Start directory is not importable" not in e.args[0]:
                         raise
-                    message = '. Did you forget to add an __init__.py file?'
+                    message = ". Did you forget to add an __init__.py file?"
                     raise ImportError(e.args[0] + message)
 
     output_dir = tempfile.mkdtemp()
@@ -57,39 +58,41 @@
     results = []
 
     for index, test in enumerate(suite._tests):
-        output_file = os.path.join(output_dir, 'test_%s.output' % index)
+        output_file = os.path.join(output_dir, "test_%s.output" % index)
 
-        test_result = subprocess.Popen([sys.executable, test_files[index]],
-                                       stdout=open(output_file, 'w+'),
-                                       stderr=subprocess.STDOUT)
-        results.append(
-            TestResult(test_result, output_file, test, test_files[index]))
+        test_result = subprocess.Popen(
+            [sys.executable, test_files[index]],
+            stdout=open(output_file, "w+"),
+            stderr=subprocess.STDOUT,
+        )
+        results.append(TestResult(test_result, output_file, test, test_files[index]))
 
     all_failures = []
     for index, result in enumerate(results):
         try:
             failures = result.test_result.wait(timeout=60)
             if failures:
-                print('Failure logs for %s:' % result.test_filename,
-                      file=sys.stderr)
-                with open(result.output_file, 'r') as out_file:
+                print("Failure logs for %s:" % result.test_filename, file=sys.stderr)
+                with open(result.output_file, "r") as out_file:
                     print(out_file.read(), file=sys.stderr)
-                all_failures.append(result.test_filename + ' (failed)')
+                all_failures.append(result.test_filename + " (failed)")
         except subprocess.TimeoutExpired:
-            all_failures.append(result.test_filename + ' (timed out)')
-            print('The following test timed out: %r' % result.test_filename,
-                  file=sys.stderr)
-            with open(result.output_file, 'r') as out_file:
+            all_failures.append(result.test_filename + " (timed out)")
+            print(
+                "The following test timed out: %r" % result.test_filename,
+                file=sys.stderr,
+            )
+            with open(result.output_file, "r") as out_file:
                 print(out_file.read(), file=sys.stderr)
 
     # Prints a summary of all failed unit tests.
     if all_failures:
-        print('The following tests failed:', file=sys.stderr)
+        print("The following tests failed:", file=sys.stderr)
         for failure in all_failures:
-            print('    ', failure, file=sys.stderr)
+            print("    ", failure, file=sys.stderr)
 
     exit(bool(all_failures))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     run_all_unit_tests()
diff --git a/src/antlion/utils.py b/src/antlion/utils.py
index 5782c84..2e6a03c 100755
--- a/src/antlion/utils.py
+++ b/src/antlion/utils.py
@@ -36,7 +36,11 @@
 import zipfile
 from concurrent.futures import ThreadPoolExecutor
 
-from antlion import signals
+import mobly.keys as mobly_keys
+from typing import Any
+import yaml
+
+from antlion import signals, keys
 from antlion.controllers.adb_lib.error import AdbError
 from antlion.libs.proc import job
 
@@ -45,7 +49,7 @@
 MAX_FILENAME_LEN = 255
 
 # All Fuchsia devices use this suffix for link-local mDNS host names.
-FUCHSIA_MDNS_TYPE = '_fuchsia._udp.local.'
+FUCHSIA_MDNS_TYPE = "_fuchsia._udp.local."
 
 # Default max seconds it takes for Duplicate Address Detection to finish before
 # assigning an IPv6 address.
@@ -58,13 +62,13 @@
 
 class NexusModelNames:
     # TODO(angli): This will be fixed later by angli.
-    ONE = 'sprout'
-    N5 = 'hammerhead'
-    N5v2 = 'bullhead'
-    N6 = 'shamu'
-    N6v2 = 'angler'
-    N6v3 = 'marlin'
-    N5v3 = 'sailfish'
+    ONE = "sprout"
+    N5 = "hammerhead"
+    N5v2 = "bullhead"
+    N6 = "shamu"
+    N6v2 = "angler"
+    N6v3 = "marlin"
+    N5v3 = "sailfish"
 
 
 class DozeModeStatus:
@@ -75,16 +79,30 @@
 ascii_letters_and_digits = string.ascii_letters + string.digits
 valid_filename_chars = "-_." + ascii_letters_and_digits
 
-models = ("sprout", "occam", "hammerhead", "bullhead", "razor", "razorg",
-          "shamu", "angler", "volantis", "volantisg", "mantaray", "fugu",
-          "ryu", "marlin", "sailfish")
+models = (
+    "sprout",
+    "occam",
+    "hammerhead",
+    "bullhead",
+    "razor",
+    "razorg",
+    "shamu",
+    "angler",
+    "volantis",
+    "volantisg",
+    "mantaray",
+    "fugu",
+    "ryu",
+    "marlin",
+    "sailfish",
+)
 
 manufacture_name_to_model = {
     "flo": "razor",
     "flo_lte": "razorg",
     "flounder": "volantis",
     "flounder_lte": "volantisg",
-    "dragon": "ryu"
+    "dragon": "ryu",
 }
 
 GMT_to_olson = {
@@ -112,7 +130,7 @@
     "GMT+12": "Pacific/Fiji",
     "GMT+13": "Pacific/Tongatapu",
     "GMT-11": "Pacific/Midway",
-    "GMT-10": "Pacific/Honolulu"
+    "GMT-10": "Pacific/Honolulu",
 }
 
 
@@ -198,7 +216,7 @@
     if test_bed_controllers:
         device_list = test_bed_controllers
     else:
-        raise ValueError('test_bed_controllers is empty.')
+        raise ValueError("test_bed_controllers is empty.")
     for used_device in used_devices:
         if used_device in device_list:
             device_list.remove(used_device)
@@ -239,7 +257,7 @@
     Returns:
         A JSON object.
     """
-    with open(file_full_path, 'r') as f:
+    with open(file_full_path, "r") as f:
         try:
             return json.load(f)
         except Exception as e:
@@ -248,6 +266,60 @@
             raise
 
 
+def acts_json_to_mobly_yaml(json_path: str) -> str:
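+    """Convert an ACTS JSON config into a Mobly YAML config file.
+
+    The YAML file is written next to the JSON config (same base name with a
+    .yaml extension) and the path to the new file is returned.
+    """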
+    acts_config = load_config(json_path)
+    mobly_config = acts_to_mobly_config(acts_config)
+    mobly_yaml_path = json_path.rsplit(".json", 1)[0] + ".yaml"
+    with open(mobly_yaml_path, "w") as f:
+        yaml.safe_dump(mobly_config, f)
+    return mobly_yaml_path
+
+
+def acts_to_mobly_config(acts_config: Any) -> Any:
+    """Convert ACTS JSON config to Mobly YAML config.
+
+    Args:
+        acts_config: Full ACTS config as an object
+
+    Returns:
+        Mobly config as an object.
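+
+    Example (sketch; the literal key names below are illustrative, the actual
+    values come from antlion.keys.Config and mobly.keys.Config):
+        An ACTS config such as
+            {"logpath": "/tmp/logs", "testbed": [{"name": "tb1"}]}
+        maps to a Mobly config shaped like
+            {"MoblyParams": {"LogPath": "/tmp/logs"},
+             "TestBeds": [{"Name": "tb1", "TestParams": {}, "Controllers": {}}]}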
+    """
+    if not acts_config:
+        return acts_config
+
+    mobly_config = {}
+    if keys.Config.key_log_path.value in acts_config:
+        mobly_config[mobly_keys.Config.key_mobly_params.value] = {
+            mobly_keys.Config.key_log_path.value: acts_config[
+                keys.Config.key_log_path.value
+            ],
+        }
+
+    if keys.Config.key_testbed.value in acts_config:
+        testbeds = []
+        for acts_testbed in acts_config[keys.Config.key_testbed.value]:
+            mobly_testbed = {}
+            if keys.Config.key_testbed_name.value in acts_testbed:
+                name = acts_testbed.pop(keys.Config.key_testbed_name.value)
+                mobly_testbed[mobly_keys.Config.key_testbed_name.value] = name
+            mobly_testbed[mobly_keys.Config.key_testbed_test_params.value] = {
+                **{
+                    k: acts_config[k]
+                    for k in acts_config
+                    if k not in keys.Config.reserved_keys.value
+                },
+                **acts_testbed.pop(mobly_keys.Config.key_testbed_test_params.value, {}),
+            }
+            mobly_testbed[
+                mobly_keys.Config.key_testbed_controllers.value
+            ] = acts_testbed
+            testbeds.append(mobly_testbed)
+
+        mobly_config[mobly_keys.Config.key_testbed.value] = testbeds
+
+    return mobly_config
+
+
 def load_file_to_base64_str(f_path):
     """Loads the content of a file into a base64 string.
 
@@ -258,14 +330,14 @@
         A base64 string representing the content of the file in utf-8 encoding.
     """
     path = abs_path(f_path)
-    with open(path, 'rb') as f:
+    with open(path, "rb") as f:
         f_bytes = f.read()
         base64_str = base64.b64encode(f_bytes).decode("utf-8")
         return base64_str
 
 
-def dump_string_to_file(content, file_path, mode='w'):
-    """ Dump content of a string to
+def dump_string_to_file(content, file_path, mode="w"):
+    """Dump content of a string to
 
     Args:
         content: content to be dumped to file
@@ -340,7 +412,7 @@
         The random string generated.
     """
     letters = [random.choice(ascii_letters_and_digits) for i in range(length)]
-    return ''.join(letters)
+    return "".join(letters)
 
 
 def rand_hex_str(length):
@@ -353,7 +425,7 @@
         The random string generated.
     """
     letters = [random.choice(string.hexdigits) for i in range(length)]
-    return ''.join(letters)
+    return "".join(letters)
 
 
 # Thead/Process related functions.
@@ -383,8 +455,11 @@
             try:
                 return_vals.append(future.result())
             except Exception as exc:
-                print("{} generated an exception: {}".format(
-                    params, traceback.format_exc()))
+                print(
+                    "{} generated an exception: {}".format(
+                        params, traceback.format_exc()
+                    )
+                )
                 return_vals.append(exc)
         return return_vals
 
@@ -401,11 +476,10 @@
     Raises:
         OSError is raised if an error occurred during the command execution.
     """
-    cmd = ' '.join(cmds)
-    proc = subprocess.Popen(cmd,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=True)
+    cmd = " ".join(cmds)
+    proc = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
+    )
     (out, err) = proc.communicate()
     if not err:
         return out
@@ -440,8 +514,10 @@
     ret = proc.poll()
     if ret is not None:
         out, err = proc.communicate()
-        raise ActsUtilsError("Process %d has terminated. ret: %d, stderr: %s,"
-                             " stdout: %s" % (proc.pid, ret, err, out))
+        raise ActsUtilsError(
+            "Process %d has terminated. ret: %d, stderr: %s,"
+            " stdout: %s" % (proc.pid, ret, err, out)
+        )
 
 
 def start_standing_subprocess(cmd, check_health_delay=0, shell=True):
@@ -464,11 +540,13 @@
     Returns:
         The subprocess that got started.
     """
-    proc = subprocess.Popen(cmd,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=shell,
-                            preexec_fn=os.setpgrp)
+    proc = subprocess.Popen(
+        cmd,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=shell,
+        preexec_fn=os.setpgrp,
+    )
     logging.debug("Start standing subprocess with cmd: %s", cmd)
     if check_health_delay > 0:
         time.sleep(check_health_delay)
@@ -536,13 +614,11 @@
 
 # Timeout decorator block
 class TimeoutError(Exception):
-    """Exception for timeout decorator related errors.
-    """
+    """Exception for timeout decorator related errors."""
 
 
 def _timeout_handler(signum, frame):
-    """Handler function used by signal to terminate a timed out function.
-    """
+    """Handler function used by signal to terminate a timed out function."""
     raise TimeoutError()
 
 
@@ -564,7 +640,6 @@
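+
+    Example (illustrative; assumes this is the module's timeout(sec) decorator
+    factory):
+        @timeout(5)
+        def might_hang():
+            ...
+
+        Calling might_hang() raises TimeoutError if it runs longer than 5s.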
     """
 
     def decorator(func):
-
         @functools.wraps(func)
         def wrapper(*args, **kwargs):
             if sec:
@@ -573,8 +648,11 @@
             try:
                 return func(*args, **kwargs)
             except TimeoutError:
-                raise TimeoutError(("Function {} timed out after {} "
-                                    "seconds.").format(func.__name__, sec))
+                raise TimeoutError(
+                    ("Function {} timed out after {} " "seconds.").format(
+                        func.__name__, sec
+                    )
+                )
             finally:
                 signal.alarm(0)
 
@@ -634,8 +712,9 @@
 
     try:
         wait_for_device_with_timeout(ad)
-        ad.adb.shell("settings put global airplane_mode_on {}".format(
-            1 if new_state else 0))
+        ad.adb.shell(
+            "settings put global airplane_mode_on {}".format(1 if new_state else 0)
+        )
         ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE")
     except TimeoutError:
         # adb wait for device timeout
@@ -656,7 +735,7 @@
 
 
 def get_device_usb_charging_status(ad):
-    """ Returns the usb charging status of the device.
+    """Returns the usb charging status of the device.
 
     Args:
         ad: android device object
@@ -664,14 +743,14 @@
     Returns:
         True if charging
         False if not charging
-     """
+    """
     adb_shell_result = ad.adb.shell("dumpsys deviceidle get charging")
     ad.log.info("Device Charging State: {}".format(adb_shell_result))
-    return adb_shell_result == 'true'
+    return adb_shell_result == "true"
 
 
 def disable_usb_charging(ad):
-    """ Unplug device from usb charging.
+    """Unplug device from usb charging.
 
     Args:
         ad: android device object
@@ -689,7 +768,7 @@
 
 
 def enable_usb_charging(ad):
-    """ Plug device to usb charging.
+    """Plug device to usb charging.
 
     Args:
         ad: android device object
@@ -723,7 +802,7 @@
     time.sleep(5)
     adb_shell_result = ad.adb.shell("dumpsys deviceidle get deep")
     if not adb_shell_result.startswith(DozeModeStatus.IDLE):
-        info = ("dumpsys deviceidle get deep: {}".format(adb_shell_result))
+        info = "dumpsys deviceidle get deep: {}".format(adb_shell_result)
         print(info)
         return False
     return True
@@ -743,7 +822,7 @@
     ad.adb.shell("dumpsys battery reset")
     adb_shell_result = ad.adb.shell("dumpsys deviceidle get deep")
     if not adb_shell_result.startswith(DozeModeStatus.ACTIVE):
-        info = ("dumpsys deviceidle get deep: {}".format(adb_shell_result))
+        info = "dumpsys deviceidle get deep: {}".format(adb_shell_result)
         print(info)
         return False
     return True
@@ -766,7 +845,7 @@
     ad.adb.shell("cmd deviceidle step light")
     adb_shell_result = ad.adb.shell("dumpsys deviceidle get light")
     if not adb_shell_result.startswith(DozeModeStatus.IDLE):
-        info = ("dumpsys deviceidle get light: {}".format(adb_shell_result))
+        info = "dumpsys deviceidle get light: {}".format(adb_shell_result)
         print(info)
         return False
     return True
@@ -786,7 +865,7 @@
     ad.adb.shell("cmd deviceidle disable light")
     adb_shell_result = ad.adb.shell("dumpsys deviceidle get light")
     if not adb_shell_result.startswith(DozeModeStatus.ACTIVE):
-        info = ("dumpsys deviceidle get light: {}".format(adb_shell_result))
+        info = "dumpsys deviceidle get light: {}".format(adb_shell_result)
         print(info)
         return False
     return True
@@ -799,8 +878,7 @@
         ad: android device object.
         new_state: new state for "Ambient Display". True or False.
     """
-    ad.adb.shell(
-        "settings put secure doze_enabled {}".format(1 if new_state else 0))
+    ad.adb.shell("settings put secure doze_enabled {}".format(1 if new_state else 0))
 
 
 def set_adaptive_brightness(ad, new_state):
@@ -810,8 +888,9 @@
         ad: android device object.
         new_state: new state for "Adaptive Brightness". True or False.
     """
-    ad.adb.shell("settings put system screen_brightness_mode {}".format(
-        1 if new_state else 0))
+    ad.adb.shell(
+        "settings put system screen_brightness_mode {}".format(1 if new_state else 0)
+    )
 
 
 def set_auto_rotate(ad, new_state):
@@ -821,8 +900,9 @@
         ad: android device object.
         new_state: new state for "Auto-rotate". True or False.
     """
-    ad.adb.shell("settings put system accelerometer_rotation {}".format(
-        1 if new_state else 0))
+    ad.adb.shell(
+        "settings put system accelerometer_rotation {}".format(1 if new_state else 0)
+    )
 
 
 def set_location_service(ad, new_state):
@@ -834,12 +914,16 @@
             If new_state is False, turn off location service.
             If new_state if True, set location service to "High accuracy".
     """
-    ad.adb.shell("content insert --uri "
-                 " content://com.google.settings/partner --bind "
-                 "name:s:network_location_opt_in --bind value:s:1")
-    ad.adb.shell("content insert --uri "
-                 " content://com.google.settings/partner --bind "
-                 "name:s:use_location_for_services --bind value:s:1")
+    ad.adb.shell(
+        "content insert --uri "
+        " content://com.google.settings/partner --bind "
+        "name:s:network_location_opt_in --bind value:s:1"
+    )
+    ad.adb.shell(
+        "content insert --uri "
+        " content://com.google.settings/partner --bind "
+        "name:s:use_location_for_services --bind value:s:1"
+    )
     if new_state:
         ad.adb.shell("settings put secure location_mode 3")
     else:
@@ -855,8 +939,9 @@
             if new_state is False, set mobile_data_always_on disabled.
             if new_state if True, set mobile_data_always_on enabled.
     """
-    ad.adb.shell("settings put global mobile_data_always_on {}".format(
-        1 if new_state else 0))
+    ad.adb.shell(
+        "settings put global mobile_data_always_on {}".format(1 if new_state else 0)
+    )
 
 
 def bypass_setup_wizard(ad):
@@ -870,42 +955,47 @@
         False if failed.
     """
     try:
-        ad.adb.shell("am start -n \"com.google.android.setupwizard/"
-                     ".SetupWizardExitActivity\"")
+        ad.adb.shell(
+            'am start -n "com.google.android.setupwizard/' '.SetupWizardExitActivity"'
+        )
         logging.debug("No error during default bypass call.")
     except AdbError as adb_error:
         if adb_error.stdout == "ADB_CMD_OUTPUT:0":
-            if adb_error.stderr and \
-                    not adb_error.stderr.startswith("Error type 3\n"):
-                logging.error("ADB_CMD_OUTPUT:0, but error is %s " %
-                              adb_error.stderr)
+            if adb_error.stderr and not adb_error.stderr.startswith("Error type 3\n"):
+                logging.error("ADB_CMD_OUTPUT:0, but error is %s " % adb_error.stderr)
                 raise adb_error
-            logging.debug("Bypass wizard call received harmless error 3: "
-                          "No setup to bypass.")
+            logging.debug(
+                "Bypass wizard call received harmless error 3: " "No setup to bypass."
+            )
         elif adb_error.stdout == "ADB_CMD_OUTPUT:255":
             # Run it again as root.
             ad.adb.root_adb()
             logging.debug("Need root access to bypass setup wizard.")
             try:
-                ad.adb.shell("am start -n \"com.google.android.setupwizard/"
-                             ".SetupWizardExitActivity\"")
+                ad.adb.shell(
+                    'am start -n "com.google.android.setupwizard/'
+                    '.SetupWizardExitActivity"'
+                )
                 logging.debug("No error during rooted bypass call.")
             except AdbError as adb_error:
                 if adb_error.stdout == "ADB_CMD_OUTPUT:0":
-                    if adb_error.stderr and \
-                            not adb_error.stderr.startswith("Error type 3\n"):
-                        logging.error("Rooted ADB_CMD_OUTPUT:0, but error is "
-                                      "%s " % adb_error.stderr)
+                    if adb_error.stderr and not adb_error.stderr.startswith(
+                        "Error type 3\n"
+                    ):
+                        logging.error(
+                            "Rooted ADB_CMD_OUTPUT:0, but error is "
+                            "%s " % adb_error.stderr
+                        )
                         raise adb_error
                     logging.debug(
                         "Rooted bypass wizard call received harmless "
-                        "error 3: No setup to bypass.")
+                        "error 3: No setup to bypass."
+                    )
 
     # magical sleep to wait for the gservices override broadcast to complete
     time.sleep(3)
 
-    provisioned_state = int(
-        ad.adb.shell("settings get global device_provisioned"))
+    provisioned_state = int(ad.adb.shell("settings get global device_provisioned"))
     if provisioned_state != 1:
         logging.error("Failed to bypass setup wizard.")
         return False
@@ -926,7 +1016,8 @@
         True: if all good
     """
     result = re.search(
-        r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out)
+        r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out
+    )
     if not result:
         ad.log.info("Ping failed with %s", out)
         return False
@@ -935,21 +1026,24 @@
     packet_xmit = int(result.group(1))
     packet_rcvd = int(result.group(2))
     min_packet_xmit_rcvd = (100 - loss_tolerance) * 0.01
-    if (packet_loss > loss_tolerance
-            or packet_xmit < count * min_packet_xmit_rcvd
-            or packet_rcvd < count * min_packet_xmit_rcvd):
-        ad.log.error("%s, ping failed with loss more than tolerance %s%%",
-                     result.group(0), loss_tolerance)
+    if (
+        packet_loss > loss_tolerance
+        or packet_xmit < count * min_packet_xmit_rcvd
+        or packet_rcvd < count * min_packet_xmit_rcvd
+    ):
+        ad.log.error(
+            "%s, ping failed with loss more than tolerance %s%%",
+            result.group(0),
+            loss_tolerance,
+        )
         return False
     ad.log.info("Ping succeed with %s", result.group(0))
     return True
 
 
-def adb_shell_ping(ad,
-                   count=120,
-                   dest_ip="www.google.com",
-                   timeout=200,
-                   loss_tolerance=20):
+def adb_shell_ping(
+    ad, count=120, dest_ip="www.google.com", timeout=200, loss_tolerance=20
+):
     """Ping utility using adb shell.
 
     Args:
@@ -965,8 +1059,7 @@
     if dest_ip:
         ping_cmd += " %s" % dest_ip
     try:
-        ad.log.info("Starting ping test to %s using adb command %s", dest_ip,
-                    ping_cmd)
+        ad.log.info("Starting ping test to %s using adb command %s", dest_ip, ping_cmd)
         out = ad.adb.shell(ping_cmd, timeout=timeout, ignore_status=True)
         if not parse_ping_ouput(ad, count, out, loss_tolerance):
             return False
@@ -985,7 +1078,7 @@
         zip_name: str, name of the generated archive
         src_dir: str, path to the source directory
     """
-    with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as zip:
+    with zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) as zip:
         for root, dirs, files in os.walk(src_dir):
             for file in files:
                 path = os.path.join(root, file)
@@ -999,7 +1092,7 @@
         zip_path: The path to the zipped file.
         extract_location: the directory to extract to.
     """
-    with zipfile.ZipFile(zip_path, 'r') as zip_file:
+    with zipfile.ZipFile(zip_path, "r") as zip_file:
         for info in zip_file.infolist():
             _extract_file(zip_file, info, extract_location)
 
@@ -1039,8 +1132,8 @@
         command_regex: A regex that matches the command line given. Must be
             pgrep compatible.
     """
-    pid = job.run('pgrep -f %s' % command_regex).stdout
-    runtime = ''
+    pid = job.run("pgrep -f %s" % command_regex).stdout
+    runtime = ""
     if pid:
         runtime = job.run('ps -o etime= -p "%s"' % pid).stdout
     return runtime
@@ -1048,8 +1141,8 @@
 
 def get_process_uptime(process):
     """Returns the runtime in [[dd-]hh:]mm:ss, or '' if not running."""
-    pid = job.run('pidof %s' % process, ignore_status=True).stdout
-    runtime = ''
+    pid = job.run("pidof %s" % process, ignore_status=True).stdout
+    runtime = ""
     if pid:
         runtime = job.run('ps -o etime= -p "%s"' % pid).stdout
     return runtime
@@ -1057,8 +1150,8 @@
 
 def get_device_process_uptime(adb, process):
     """Returns the uptime of a device process."""
-    pid = adb.shell('pidof %s' % process, ignore_status=True)
-    runtime = ''
+    pid = adb.shell("pidof %s" % process, ignore_status=True)
+    runtime = ""
     if pid:
         runtime = adb.shell('ps -o etime= -p "%s"' % pid)
     return runtime
@@ -1091,8 +1184,10 @@
         if time.time() > end_time:
             break
         time.sleep(sleep_s)
-    raise TimeoutError('Failed to complete function %s in %d seconds having '
-                       'attempted %d times.' % (str(func), timeout_s, count))
+    raise TimeoutError(
+        "Failed to complete function %s in %d seconds having "
+        "attempted %d times." % (str(func), timeout_s, count)
+    )
 
 
 # Adapted from
@@ -1260,7 +1355,7 @@
     return results
 
 
-def test_concurrent_actions(*calls, failure_exceptions=(Exception, )):
+def test_concurrent_actions(*calls, failure_exceptions=(Exception,)):
     """Concurrently runs all passed in calls using multithreading.
 
     If any callable raises an Exception found within failure_exceptions, the
@@ -1319,8 +1414,11 @@
 
         self._logger = logger
         self._log_levels = log_levels or [
-            logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
-            logging.CRITICAL
+            logging.DEBUG,
+            logging.INFO,
+            logging.WARNING,
+            logging.ERROR,
+            logging.CRITICAL,
         ]
         if isinstance(self._log_levels, int):
             self._log_levels = [self._log_levels]
@@ -1339,8 +1437,8 @@
 
 class BlockingTimer(object):
     """Context manager used to block until a specified amount of time has
-     elapsed.
-     """
+    elapsed.
+    """
 
     def __init__(self, secs):
         """Initializes a BlockingTimer
@@ -1366,7 +1464,7 @@
             socket.inet_aton(address)
         except socket.error:
             return False
-        return address.count('.') == 3
+        return address.count(".") == 3
     except socket.error:  # not a valid address
         return False
 
@@ -1374,8 +1472,8 @@
 
 
 def is_valid_ipv6_address(address):
-    if '%' in address:
-        address = address.split('%')[0]
+    if "%" in address:
+        address = address.split("%")[0]
     try:
         socket.inet_pton(socket.AF_INET6, address)
     except socket.error:  # not a valid address
@@ -1384,7 +1482,7 @@
 
 
 def merge_dicts(*dict_args):
-    """ Merges args list of dictionaries into a single dictionary.
+    """Merges args list of dictionaries into a single dictionary.
 
     Args:
         dict_args: an args list of dictionaries to be merged. If multiple
@@ -1399,7 +1497,7 @@
 
 def ascii_string(uc_string):
     """Converts unicode string to ascii"""
-    return str(uc_string).encode('ASCII')
+    return str(uc_string).encode("ASCII")
 
 
 def get_interface_ip_addresses(comm_channel, interface):
@@ -1429,33 +1527,33 @@
         addrs = comm_channel.adb.shell(
             f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
         ).splitlines()
-    elif (type(comm_channel) is SshConnection or is_local):
+    elif type(comm_channel) is SshConnection or is_local:
         addrs = comm_channel.run(
             f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
         ).stdout.splitlines()
     elif type(comm_channel) is FuchsiaDevice:
         interfaces = comm_channel.sl4f.netstack_lib.netstackListInterfaces()
-        err = interfaces.get('error')
+        err = interfaces.get("error")
         if err is not None:
-            raise ActsUtilsError(f'Failed get_interface_ip_addresses: {err}')
+            raise ActsUtilsError(f"Failed get_interface_ip_addresses: {err}")
         addrs = []
-        for item in interfaces.get('result'):
-            if item['name'] != interface:
+        for item in interfaces.get("result"):
+            if item["name"] != interface:
                 continue
-            for ipv4_address in item['ipv4_addresses']:
-                ipv4_address = '.'.join(map(str, ipv4_address))
+            for ipv4_address in item["ipv4_addresses"]:
+                ipv4_address = ".".join(map(str, ipv4_address))
                 addrs.append(ipv4_address)
-            for ipv6_address in item['ipv6_addresses']:
+            for ipv6_address in item["ipv6_addresses"]:
                 converted_ipv6_address = []
                 for octet in ipv6_address:
-                    converted_ipv6_address.append(format(octet, 'x').zfill(2))
-                ipv6_address = ''.join(converted_ipv6_address)
-                ipv6_address = (':'.join(
-                    ipv6_address[i:i + 4]
-                    for i in range(0, len(ipv6_address), 4)))
+                    converted_ipv6_address.append(format(octet, "x").zfill(2))
+                ipv6_address = "".join(converted_ipv6_address)
+                ipv6_address = ":".join(
+                    ipv6_address[i : i + 4] for i in range(0, len(ipv6_address), 4)
+                )
                 addrs.append(str(ipaddress.ip_address(ipv6_address)))
     else:
-        raise ValueError('Unsupported method to send command to device.')
+        raise ValueError("Unsupported method to send command to device.")
 
     ipv4_private_local_addresses = []
     ipv4_public_addresses = []
@@ -1469,13 +1567,14 @@
             if on_device_ip.is_private:
                 ipv4_private_local_addresses.append(str(on_device_ip))
             elif on_device_ip.is_global or (
-                    # Carrier private doesn't have a property, so we check if
-                    # all other values are left unset.
-                    not on_device_ip.is_reserved
-                    and not on_device_ip.is_unspecified
-                    and not on_device_ip.is_link_local
-                    and not on_device_ip.is_loopback
-                    and not on_device_ip.is_multicast):
+                # Carrier private doesn't have a property, so we check if
+                # all other values are left unset.
+                not on_device_ip.is_reserved
+                and not on_device_ip.is_unspecified
+                and not on_device_ip.is_link_local
+                and not on_device_ip.is_loopback
+                and not on_device_ip.is_multicast
+            ):
                 ipv4_public_addresses.append(str(on_device_ip))
         elif on_device_ip.version == 6:
             if on_device_ip.is_link_local:
@@ -1486,11 +1585,11 @@
                 ipv6_public_addresses.append(str(on_device_ip))
 
     return {
-        'ipv4_private': ipv4_private_local_addresses,
-        'ipv4_public': ipv4_public_addresses,
-        'ipv6_link_local': ipv6_link_local_addresses,
-        'ipv6_private_local': ipv6_private_local_addresses,
-        'ipv6_public': ipv6_public_addresses
+        "ipv4_private": ipv4_private_local_addresses,
+        "ipv4_public": ipv4_public_addresses,
+        "ipv6_link_local": ipv6_link_local_addresses,
+        "ipv6_private_local": ipv6_private_local_addresses,
+        "ipv6_public": ipv6_public_addresses,
     }
 
 
@@ -1502,10 +1601,7 @@
     pass
 
 
-def get_addr(comm_channel,
-             interface,
-             addr_type='ipv4_private',
-             timeout_sec=None):
+def get_addr(comm_channel, interface, addr_type="ipv4_private", timeout_sec=None):
     """Get the requested type of IP address for an interface; if an address is
     not available, retry until the timeout has been reached.
 
@@ -1526,9 +1622,9 @@
         MultipleAddresses: Several addresses are available
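+
+    Example (illustrative; ssh_conn stands in for any supported comm_channel):
+        get_addr(ssh_conn, "eth0") returns a single "ipv4_private" address
+        string such as "192.168.1.2", or raises AddressTimeout if none shows
+        up within the timeout.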
     """
     if not timeout_sec:
-        if 'ipv4' in addr_type:
+        if "ipv4" in addr_type:
             timeout_sec = 3
-        elif 'ipv6' in addr_type:
+        elif "ipv6" in addr_type:
             timeout_sec = DAD_TIMEOUT_SEC
         else:
             raise signals.TestAbortClass(f'Unknown addr_type "{addr_type}"')
@@ -1537,17 +1633,16 @@
     elapsed = 0
 
     while elapsed <= timeout_sec:
-        ip_addrs = get_interface_ip_addresses(comm_channel,
-                                              interface)[addr_type]
+        ip_addrs = get_interface_ip_addresses(comm_channel, interface)[addr_type]
         if len(ip_addrs) > 1:
             raise MultipleAddresses(
-                f'Expected only one "{addr_type}" address, got {ip_addrs}')
+                f'Expected only one "{addr_type}" address, got {ip_addrs}'
+            )
         elif len(ip_addrs) == 1:
             return ip_addrs[0]
         elapsed = time.time() - start
 
-    raise AddressTimeout(
-        f'No available "{addr_type}" address after {timeout_sec}s')
+    raise AddressTimeout(f'No available "{addr_type}" address after {timeout_sec}s')
 
 
 def get_interface_based_on_ip(comm_channel, desired_ip_address):
@@ -1562,30 +1657,32 @@
         The name of the test interface.
     """
 
-    desired_ip_address = desired_ip_address.split('%', 1)[0]
+    desired_ip_address = desired_ip_address.split("%", 1)[0]
     all_ips_and_interfaces = comm_channel.run(
-        '(ip -o -4 addr show; ip -o -6 addr show) | '
-        'awk \'{print $2" "$4}\'').stdout
-    for ip_address_and_interface in all_ips_and_interfaces.split('\n'):
+        "(ip -o -4 addr show; ip -o -6 addr show) | " "awk '{print $2\" \"$4}'"
+    ).stdout
+    for ip_address_and_interface in all_ips_and_interfaces.split("\n"):
         if desired_ip_address in ip_address_and_interface:
             return ip_address_and_interface.split()[1][:-1]
     return None
 
 
 def renew_linux_ip_address(comm_channel, interface):
-    comm_channel.run('sudo ip link set %s down' % interface)
-    comm_channel.run('sudo ip link set %s up' % interface)
-    comm_channel.run('sudo dhclient -r %s' % interface)
-    comm_channel.run('sudo dhclient %s' % interface)
+    comm_channel.run("sudo ip link set %s down" % interface)
+    comm_channel.run("sudo ip link set %s up" % interface)
+    comm_channel.run("sudo dhclient -r %s" % interface)
+    comm_channel.run("sudo dhclient %s" % interface)
 
 
-def get_ping_command(dest_ip,
-                     count=3,
-                     interval=1000,
-                     timeout=1000,
-                     size=56,
-                     os_type='Linux',
-                     additional_ping_params=None):
+def get_ping_command(
+    dest_ip,
+    count=3,
+    interval=1000,
+    timeout=1000,
+    size=56,
+    os_type="Linux",
+    additional_ping_params=None,
+):
     """Builds ping command string based on address type, os, and params.
 
     Args:
@@ -1603,45 +1700,52 @@
         String representing the ping command.
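+
+    Example (illustrative): with the defaults, get_ping_command("8.8.8.8")
+    builds a Linux command string along the lines of
+    "ping -W 1.0 -c 3 -i 1.0 -s 56  8.8.8.8".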
     """
     if is_valid_ipv4_address(dest_ip):
-        ping_binary = 'ping'
+        ping_binary = "ping"
     elif is_valid_ipv6_address(dest_ip):
-        ping_binary = 'ping6'
+        ping_binary = "ping6"
     else:
-        raise ValueError('Invalid ip addr: %s' % dest_ip)
+        raise ValueError("Invalid ip addr: %s" % dest_ip)
 
-    if os_type == 'Darwin':
+    if os_type == "Darwin":
         if is_valid_ipv6_address(dest_ip):
             # ping6 on MacOS doesn't support timeout
-            logging.debug(
-                'Ignoring timeout, as ping6 on MacOS does not support it.')
+            logging.debug("Ignoring timeout, as ping6 on MacOS does not support it.")
             timeout_flag = []
         else:
-            timeout_flag = ['-t', str(timeout / 1000)]
-    elif os_type == 'Linux':
-        timeout_flag = ['-W', str(timeout / 1000)]
+            timeout_flag = ["-t", str(timeout / 1000)]
+    elif os_type == "Linux":
+        timeout_flag = ["-W", str(timeout / 1000)]
     else:
-        raise ValueError('Invalid OS.  Only Linux and MacOS are supported.')
+        raise ValueError("Invalid OS.  Only Linux and MacOS are supported.")
 
     if not additional_ping_params:
-        additional_ping_params = ''
+        additional_ping_params = ""
 
     ping_cmd = [
-        ping_binary, *timeout_flag, '-c',
-        str(count), '-i',
-        str(interval / 1000), '-s',
-        str(size), additional_ping_params, dest_ip
+        ping_binary,
+        *timeout_flag,
+        "-c",
+        str(count),
+        "-i",
+        str(interval / 1000),
+        "-s",
+        str(size),
+        additional_ping_params,
+        dest_ip,
     ]
-    return ' '.join(ping_cmd)
+    return " ".join(ping_cmd)
 
 
-def ping(comm_channel,
-         dest_ip,
-         count=3,
-         interval=1000,
-         timeout=1000,
-         size=56,
-         additional_ping_params=None):
-    """ Generic linux ping function, supports local (acts.libs.proc.job) and
+def ping(
+    comm_channel,
+    dest_ip,
+    count=3,
+    interval=1000,
+    timeout=1000,
+    size=56,
+    additional_ping_params=None,
+):
+    """Generic linux ping function, supports local (acts.libs.proc.job) and
     SshConnections (acts.libs.proc.job over ssh) to Linux based OSs and MacOS.
 
     NOTES: This will work with Android over SSH, but does not function over ADB
@@ -1677,23 +1781,27 @@
         Any values that cannot be parsed are left as None
     """
     from antlion.controllers.utils_lib.ssh.connection import SshConnection
-    is_local = comm_channel == job
-    os_type = platform.system() if is_local else 'Linux'
-    ping_cmd = get_ping_command(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                os_type=os_type,
-                                additional_ping_params=additional_ping_params)
 
-    if (type(comm_channel) is SshConnection or is_local):
+    is_local = comm_channel == job
+    os_type = platform.system() if is_local else "Linux"
+    ping_cmd = get_ping_command(
+        dest_ip,
+        count=count,
+        interval=interval,
+        timeout=timeout,
+        size=size,
+        os_type=os_type,
+        additional_ping_params=additional_ping_params,
+    )
+
+    if type(comm_channel) is SshConnection or is_local:
         logging.debug(
-            'Running ping with parameters (count: %s, interval: %s, timeout: '
-            '%s, size: %s)' % (count, interval, timeout, size))
+            "Running ping with parameters (count: %s, interval: %s, timeout: "
+            "%s, size: %s)" % (count, interval, timeout, size)
+        )
         ping_result = comm_channel.run(ping_cmd, ignore_status=True)
     else:
-        raise ValueError('Unsupported comm_channel: %s' % type(comm_channel))
+        raise ValueError("Unsupported comm_channel: %s" % type(comm_channel))
 
     if isinstance(ping_result, job.Error):
         ping_result = ping_result.result
@@ -1708,16 +1816,19 @@
     rtt_mdev = None
 
     summary = re.search(
-        '([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet '
-        'loss.*?time ([0-9]+)', ping_result.stdout)
+        "([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet "
+        "loss.*?time ([0-9]+)",
+        ping_result.stdout,
+    )
     if summary:
         transmitted = summary[1]
         received = summary[2]
         packet_loss = summary[3]
         time = summary[4]
 
-    rtt_stats = re.search('= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)',
-                          ping_result.stdout)
+    rtt_stats = re.search(
+        "= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)", ping_result.stdout
+    )
     if rtt_stats:
         rtt_min = rtt_stats[1]
         rtt_avg = rtt_stats[2]
@@ -1725,39 +1836,43 @@
         rtt_mdev = rtt_stats[4]
 
     return {
-        'command': ping_result.command,
-        'exit_status': ping_result.exit_status,
-        'stdout': ping_result.stdout,
-        'stderr': ping_result.stderr,
-        'transmitted': transmitted,
-        'received': received,
-        'packet_loss': packet_loss,
-        'time': time,
-        'rtt_min': rtt_min,
-        'rtt_avg': rtt_avg,
-        'rtt_max': rtt_max,
-        'rtt_mdev': rtt_mdev
+        "command": ping_result.command,
+        "exit_status": ping_result.exit_status,
+        "stdout": ping_result.stdout,
+        "stderr": ping_result.stderr,
+        "transmitted": transmitted,
+        "received": received,
+        "packet_loss": packet_loss,
+        "time": time,
+        "rtt_min": rtt_min,
+        "rtt_avg": rtt_avg,
+        "rtt_max": rtt_max,
+        "rtt_mdev": rtt_mdev,
     }
 
 
-def can_ping(comm_channel,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=56,
-             additional_ping_params=None):
+def can_ping(
+    comm_channel,
+    dest_ip,
+    count=3,
+    interval=1000,
+    timeout=1000,
+    size=56,
+    additional_ping_params=None,
+):
     """Returns whether device connected via comm_channel can ping a dest
     address"""
-    ping_results = ping(comm_channel,
-                        dest_ip,
-                        count=count,
-                        interval=interval,
-                        timeout=timeout,
-                        size=size,
-                        additional_ping_params=additional_ping_params)
+    ping_results = ping(
+        comm_channel,
+        dest_ip,
+        count=count,
+        interval=interval,
+        timeout=timeout,
+        size=size,
+        additional_ping_params=additional_ping_params,
+    )
 
-    return ping_results['exit_status'] == 0
+    return ping_results["exit_status"] == 0
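
A minimal usage sketch of the ping helpers reformatted above, assuming they are
imported from antlion's utils module and that antlion's local `job` module is
used as the comm_channel (both import paths are assumptions, not shown in this
diff):

    # Sketch only: module paths below are assumed for illustration.
    from antlion.libs.proc import job  # assumed local-process runner
    from antlion.utils import get_ping_command, can_ping  # assumed path

    # Build the command string without running it (Linux, IPv4 target):
    cmd = get_ping_command("192.168.1.1", count=5, timeout=2000)
    # -> roughly "ping -W 2.0 -c 5 -i 1.0 -s 56 192.168.1.1"

    # Run locally and check reachability:
    if can_ping(job, "192.168.1.1", count=5):
        print("host is reachable")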
 
 
 def ip_in_subnet(ip, subnet):
@@ -1784,7 +1899,7 @@
         list, representing mac address octets in decimal
             e.g. [18, 52, 86, 120, 154, 188]
     """
-    return [int(octet, 16) for octet in mac_addr_str.split(':')]
+    return [int(octet, 16) for octet in mac_addr_str.split(":")]
 
 
 def mac_address_list_to_str(mac_addr_list):
@@ -1802,11 +1917,11 @@
     for octet in mac_addr_list:
         hex_octet = hex(octet)[2:]
         if octet < 16:
-            hex_list.append('0%s' % hex_octet)
+            hex_list.append("0%s" % hex_octet)
         else:
             hex_list.append(hex_octet)
 
-    return ':'.join(hex_list)
+    return ":".join(hex_list)
 
 
 def get_fuchsia_mdns_ipv6_address(device_mdns_name):
@@ -1830,10 +1945,9 @@
             f'Sending mDNS query for device "{device_mdns_name}" using "{address}"'
         )
         try:
-            zeroconf = Zeroconf(ip_version=IPVersion.V6Only,
-                                interfaces=[address])
+            zeroconf = Zeroconf(ip_version=IPVersion.V6Only, interfaces=[address])
         except RuntimeError as e:
-            if 'No adapter found for IP address' in e.args[0]:
+            if "No adapter found for IP address" in e.args[0]:
                 # Most likely, a device went offline and its control
                 # interface was deleted. This is acceptable since the
                 # device that went offline isn't guaranteed to be the
@@ -1843,17 +1957,21 @@
             raise
 
         device_records = zeroconf.get_service_info(
-            FUCHSIA_MDNS_TYPE, device_mdns_name + '.' + FUCHSIA_MDNS_TYPE)
+            FUCHSIA_MDNS_TYPE, device_mdns_name + "." + FUCHSIA_MDNS_TYPE
+        )
 
         if device_records:
             for device_address in device_records.parsed_addresses():
                 device_ip_address = ipaddress.ip_address(device_address)
-                scoped_address = '%s%%%s' % (device_address, interface)
-                if (device_ip_address.version == 6
-                        and device_ip_address.is_link_local
-                        and can_ping(job, dest_ip=scoped_address)):
-                    logging.info('Found device "%s" at "%s"' %
-                                 (device_mdns_name, scoped_address))
+                scoped_address = "%s%%%s" % (device_address, interface)
+                if (
+                    device_ip_address.version == 6
+                    and device_ip_address.is_link_local
+                    and can_ping(job, dest_ip=scoped_address)
+                ):
+                    logging.info(
+                        'Found device "%s" at "%s"' % (device_mdns_name, scoped_address)
+                    )
                     zeroconf.close()
                     del zeroconf
                     return scoped_address
@@ -1868,19 +1986,20 @@
         interfaces = psutil.net_if_addrs()
         for interface in interfaces:
             for addr in interfaces[interface]:
-                address = addr.address.split('%')[0]
-                if addr.family == socket.AF_INET6 and ipaddress.ip_address(
-                        address).is_link_local and address != 'fe80::1':
-                    futures.append(
-                        executor.submit(mdns_query, interface, address))
+                address = addr.address.split("%")[0]
+                if (
+                    addr.family == socket.AF_INET6
+                    and ipaddress.ip_address(address).is_link_local
+                    and address != "fe80::1"
+                ):
+                    futures.append(executor.submit(mdns_query, interface, address))
 
         for future in futures:
             addr = future.result()
             if addr:
                 return addr
 
-    logging.error('Unable to find IP address for device "%s"' %
-                  device_mdns_name)
+    logging.error('Unable to find IP address for device "%s"' % device_mdns_name)
     return None
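
For illustration, a hedged sketch of how this mDNS helper might be called; the
node name below is a placeholder, not a real device:

    # Sketch only: "fuchsia-node-name" is a hypothetical mDNS name.
    addr = get_fuchsia_mdns_ipv6_address("fuchsia-node-name")
    if addr:
        # addr is a scoped link-local address, e.g. "fe80::...%eno1"
        print(f"device reachable at {addr}")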
 
 
@@ -1907,11 +2026,10 @@
         matched.
     """
     if not devices:
-        raise ValueError('No devices available')
+        raise ValueError("No devices available")
 
     matches = [
-        d for d in devices
-        if hasattr(d, 'device_type') and d.device_type == device_type
+        d for d in devices if hasattr(d, "device_type") and d.device_type == device_type
     ]
 
     if len(matches) == 0:
@@ -1922,7 +2040,7 @@
         # Specifying multiple devices with the same "device_type" is a
         # configuration error.
         raise ValueError(
-            'More than one device matching "device_type" == "{}"'.format(
-                device_type))
+            'More than one device matching "device_type" == "{}"'.format(device_type)
+        )
 
     return matches[0]
diff --git a/third_party/github.com/jd/tenacity/BUILD.gn b/third_party/github.com/jd/tenacity/BUILD.gn
new file mode 100644
index 0000000..a0f0dc7
--- /dev/null
+++ b/third_party/github.com/jd/tenacity/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/python/python_library.gni")
+
+python_library("tenacity") {
+  source_root = "//third_party/github.com/jd/tenacity/src/tenacity"
+  sources = [
+    "__init__.py",
+    "_asyncio.py",
+    "_utils.py",
+    "after.py",
+    "before.py",
+    "before_sleep.py",
+    "nap.py",
+    "retry.py",
+    "stop.py",
+    "tornadoweb.py",
+    "wait.py",
+  ]
+}
diff --git a/third_party/github.com/jd/tenacity/OWNERS b/third_party/github.com/jd/tenacity/OWNERS
new file mode 100644
index 0000000..1f83792
--- /dev/null
+++ b/third_party/github.com/jd/tenacity/OWNERS
@@ -0,0 +1,2 @@
+sbalana@google.com
+patricklu@google.com
diff --git a/third_party/github.com/jd/tenacity/README.fuchsia b/third_party/github.com/jd/tenacity/README.fuchsia
new file mode 100644
index 0000000..6b0076d
--- /dev/null
+++ b/third_party/github.com/jd/tenacity/README.fuchsia
@@ -0,0 +1,9 @@
+Name: tenacity
+License: Apache 2.0
+License File: LICENSE
+Upstream Git: https://github.com/jd/tenacity
+Description:
+
+Tenacity is an Apache 2.0 licensed general-purpose retrying library,
+written in Python, to simplify the task of adding retry behavior to
+just about anything.
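
For context, tenacity's core upstream API is a retry decorator; a minimal
sketch of standard usage (not specific to this vendored copy):

    # Retry a flaky call up to 3 times, waiting 2 seconds between attempts.
    from tenacity import retry, stop_after_attempt, wait_fixed

    @retry(stop=stop_after_attempt(3), wait=wait_fixed(2))
    def fetch_status():
        ...  # any operation that may raise transiently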